Date: (Wed) Jan 20, 2016
Data sources:
Training: https://www.kaggle.com/c/facial-keypoints-detection/download/training.zip
New (test): https://www.kaggle.com/c/facial-keypoints-detection/download/test.zip
Time period:
Based on analysis utilizing <> techniques,
Summary of key steps & error improvement stats:
Use plot.ly for interactive plots?
varImp for randomForest crashes in caret version 6.0.41 -> submit bug report
Extensions toward multiclass classification are scheduled for the next release
rm(list = ls())
set.seed(12345)
options(stringsAsFactors = FALSE)
source("~/Dropbox/datascience/R/myscript.R")
source("~/Dropbox/datascience/R/mydsutils.R")
## Loading required package: caret
## Loading required package: lattice
## Loading required package: ggplot2
source("~/Dropbox/datascience/R/myplot.R")
source("~/Dropbox/datascience/R/mypetrinet.R")
source("~/Dropbox/datascience/R/myplclust.R")
source("~/Dropbox/datascience/R/mytm.R")
# Gather all package requirements here
suppressPackageStartupMessages(require(doMC))
glbCores <- 6 # number of cores on machine minus 2
registerDoMC(glbCores)
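# doMC registers a foreach parallel backend; caret::train() calls with
# allowParallel = TRUE (its default) will then spread resampling folds across
# glbCores workers (see glbMdlAllowParallel below for per-model opt-outs)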
suppressPackageStartupMessages(require(caret))
require(plyr)
## Loading required package: plyr
require(dplyr)
## Loading required package: dplyr
##
## Attaching package: 'dplyr'
## The following objects are masked from 'package:plyr':
##
## arrange, count, desc, failwith, id, mutate, rename, summarise,
## summarize
## The following objects are masked from 'package:stats':
##
## filter, lag
## The following objects are masked from 'package:base':
##
## intersect, setdiff, setequal, union
require(knitr)
## Loading required package: knitr
#source("dbgcaret.R")
#packageVersion("snow")
#require(sos); findFn("cosine", maxPages=2, sortby="MaxScore")
# Analysis control global variables
# Inputs
# url/name = "<pointer>"; if url specifies a zip file, name = "<filename>"
# sep = choose from c(NULL, "\t")
glbObsTrnFile <- list(name = "Faces_patch_mean_datafix_Train.csv")
glbObsNewFile <- list(name = "Faces_patch_mean_datafix_Test.csv") # default OR
#list(splitSpecs = list(method = NULL #select from c(NULL, "condition", "sample", "copy")
# ,nRatio = 0.3 # > 0 && < 1 if method == "sample"
# ,seed = 123 # any integer or glbObsTrnPartitionSeed if method == "sample"
# ,condition = # or 'is.na(<var>)'; '<var> <condition_operator> <value>'
# )
# )
glbInpMerge <- NULL #: default
# list(fnames = c("<fname1>", "<fname2>")) # files will be concatenated
glb_is_separate_newobs_dataset <- TRUE # or FALSE
glb_split_entity_newobs_datasets <- TRUE # FALSE not supported - use "copy" for glbObsNewFile$splitSpecs$method # select from c(FALSE, TRUE)
glbObsDropCondition <- NULL # : default
# enclose in single-quotes b/c condition might include double quotes
# use | & ; NOT || &&
# '<condition>'
# 'grepl("^First Draft Video:", glbObsAll$Headline)'
# '(is.na(glbObsAll[, glb_rsp_var_raw]) & grepl("Train", glbObsAll[, glbFeatsId]))'
#nrow(do.call("subset",list(glbObsAll, parse(text=paste0("!(", glbObsDropCondition, ")")))))
glb_obs_repartition_train_condition <- NULL # : default
# "<condition>"
glb_max_fitobs <- NULL # or any integer
glbObsTrnPartitionSeed <- 123 # or any integer
glb_is_regression <- FALSE; glb_is_classification <- !glb_is_regression;
glb_is_binomial <- TRUE # or FALSE
glb_rsp_var_raw <- "label"
# for classification, the response variable has to be a factor
glb_rsp_var <- "label.fctr" # or "left_eye_center_x.fctr"
# if the response factor is based on numbers/logicals (e.g., 0/1 or TRUE/FALSE vs. "A"/"B")
# or contains spaces (e.g., "Not in Labor Force"),
# caret predict(..., type="prob") crashes
glb_map_rsp_raw_to_var <- #NULL
function(raw) {
# return(raw ^ 0.5)
# return(log(raw))
# return(log(1 + raw))
# return(log10(raw))
# return(exp(-raw / 2))
# ret_vals <- rep_len(NA, length(raw)); ret_vals[!is.na(raw)] <- ifelse(raw[!is.na(raw)] == 1, "Y", "N"); return(relevel(as.factor(ret_vals), ref="N"))
# as.factor(paste0("B", raw))
as.factor(gsub(" ", "\\.", raw))
}
#if glb_rsp_var_raw is numeric:
#print(summary(glbObsAll[, glb_rsp_var_raw]))
#glb_map_rsp_raw_to_var(tst <- c(NA, as.numeric(summary(glbObsAll[, glb_rsp_var_raw]))))
#if glb_rsp_var_raw is character:
#print(table(glbObsAll[, glb_rsp_var_raw]))
#glb_map_rsp_raw_to_var(tst <- c(NA, names(table(glbObsAll[, glb_rsp_var_raw]))))
glb_map_rsp_var_to_raw <- #NULL
function(var) {
# return(var ^ 2.0)
# return(exp(var))
# return(10 ^ var)
# return(-log(var) * 2)
# as.numeric(var)
# gsub("\\.", " ", levels(var)[as.numeric(var)])
levels(var)[as.numeric(var)]
# c("<=50K", " >50K")[as.numeric(var)]
# c(FALSE, TRUE)[as.numeric(var)]
}
# glb_map_rsp_var_to_raw(glb_map_rsp_raw_to_var(tst))
if ((glb_rsp_var != glb_rsp_var_raw) && is.null(glb_map_rsp_raw_to_var))
stop("glb_map_rsp_raw_to_var function expected")
# List info gathered for various columns
# <col_name>: <description>; <notes>
# currently does not handle more than 1 column; consider concatenating multiple columns
# If glbFeatsId == NULL, ".rownames <- as.numeric(row.names())" is the default
glbFeatsId <- "ImageId.x.y" # choose from c(NULL : default, "<id_feat>")
glbFeatsCategory <- "P.cor.cut.fctr" # choose from c(NULL : default, "<category_feat>")
# User-specified exclusions
# glbFeatsExcludeLcl <- c(NULL
# # Required outputs
# ,"left_eye_center_x", "left_eye_center_y"
# ,"right_eye_center_x", "right_eye_center_y"
# ,"left_eye_inner_corner_x", "left_eye_inner_corner_y"
# ,"left_eye_outer_corner_x", "left_eye_outer_corner_y"
# ,"right_eye_inner_corner_x", "right_eye_inner_corner_y"
# ,"right_eye_outer_corner_x", "right_eye_outer_corner_y"
# ,"left_eyebrow_inner_end_x", "left_eyebrow_inner_end_y"
# ,"left_eyebrow_outer_end_x", "left_eyebrow_outer_end_y"
# ,"right_eyebrow_inner_end_x", "right_eyebrow_inner_end_y"
# ,"right_eyebrow_outer_end_x", "right_eyebrow_outer_end_y"
# ,"nose_tip_x", "nose_tip_y"
# ,"mouth_left_corner_x", "mouth_left_corner_y"
# ,"mouth_right_corner_x", "mouth_right_corner_y"
# ,"mouth_center_top_lip_x", "mouth_center_top_lip_y"
# ,"mouth_center_bottom_lip_x", "mouth_center_bottom_lip_y"
# )
glbFeatsExclude <- c(NULL
# Feats that should be excluded due to known causation by the prediction variable
# , "<feat1", "<feat2>"
# Feats that are linear combinations (alias in glm)
# Feature-engineering phase -> start by excluding all features except id & category & work each one back in
# ,setdiff(glbFeatsExcludeLcl, glb_rsp_var_raw)
# ,"Image.pxl.1.dgt.1"
,"ImageId","left_eye_center_x","left_eye_center_y","x","y",".pos"
# ,"P.cor","P.cor.cut.fctr"
,"P.mnkSml.1","P.mnkSml.2","P.mnkSml.3"
)
if (glb_rsp_var_raw != glb_rsp_var)
glbFeatsExclude <- union(glbFeatsExclude, glb_rsp_var_raw)
glbFeatsInteractionOnly <- list()
#glbFeatsInteractionOnly[["<child_feat>"]] <- "<parent_feat>"
glbFeatsDrop <- c(NULL
# , "<feat1>", "<feat2>"
)
glb_map_vars <- NULL # or c("<var1>", "<var2>")
glb_map_urls <- list();
# glb_map_urls[["<var1>"]] <- "<var1.url>"
glb_assign_pairs_lst <- NULL;
# glb_assign_pairs_lst[["<var1>"]] <- list(from=c(NA),
# to=c("NA.my"))
glb_assign_vars <- names(glb_assign_pairs_lst)
# Derived features; Use this mechanism to cleanse data ??? Cons: Data duplication ???
glbFeatsDerive <- list();
# glbFeatsDerive[["<feat.my.sfx>"]] <- list(
# mapfn = function(<arg1>, <arg2>) { return(<expression-of-<arg1>-and-<arg2>>) }
# , args = c("<arg1>", "<arg2>"))
#myprint_df(data.frame(ImageId = mapfn(glbObsAll$.src, glbObsAll$.pos)))
#data.frame(ImageId = mapfn(glbObsAll$.src, glbObsAll$.pos))[7045:7055, ]
# character
glbFeatsDerive[["ImageId.x.y"]] <- list(
mapfn = function(ImageId, x, y) {
return(paste(ImageId, sprintf("%02d", x), sprintf("%02d", y), sep = "#"))
}
, args = c("ImageId", "x", "y"))
# mapfn = function(Week) { return(substr(Week, 1, 10)) }
# mapfn = function(descriptor) { return(plyr::revalue(descriptor, c(
# "ABANDONED BUILDING" = "OTHER",
# "**" = "**"
# ))) }
# mapfn = function(description) { mod_raw <- description;
# This is here because it does not work if it's in txt_map_filename
# mod_raw <- gsub(paste0(c("\n", "\211", "\235", "\317", "\333"), collapse = "|"), " ", mod_raw)
# Don't parse for "." because of ".com"; use customized gsub for that text
# mod_raw <- gsub("(\\w)(!|\\*|,|-|/)(\\w)", "\\1\\2 \\3", mod_raw);
# Some state acronyms need context for separation e.g.
# LA/L.A. could either be "Louisiana" or "LosAngeles"
# modRaw <- gsub("\\bL\\.A\\.( |,|')", "LosAngeles\\1", modRaw);
# OK/O.K. could either be "Oklahoma" or "Okay"
# modRaw <- gsub("\\bACA OK\\b", "ACA OKay", modRaw);
# modRaw <- gsub("\\bNow O\\.K\\.\\b", "Now OKay", modRaw);
# PR/P.R. could either be "PuertoRico" or "Public Relations"
# modRaw <- gsub("\\bP\\.R\\. Campaign", "PublicRelations Campaign", modRaw);
# VA/V.A. could either be "Virginia" or "VeteransAdministration"
# modRaw <- gsub("\\bthe V\\.A\\.\\:", "the VeteranAffairs:", modRaw);
#
# Custom mods
# return(mod_raw) }
# numeric
# Create feature based on record position/id in data
glbFeatsDerive[[".pos"]] <- list(
mapfn = function(.rnorm) { return(seq_along(.rnorm)) }
, args = c(".rnorm"))
# glbFeatsDerive[["ImageId"]] <- list(
# mapfn = function(.src, .pos) {
# # return(paste(.src, sprintf("%04d", .pos), sep = "#"))
# return(paste(.src, sprintf("%04d",
# ifelse(.src == "Train", .pos, .pos - 7049)
# ), sep = "#"))
# }
# , args = c(".src", ".pos"))
#myprint_df(data.frame(ImageId = mapfn(glbObsAll$.src, glbObsAll$.pos)))
#data.frame(ImageId = mapfn(glbObsAll$.src, glbObsAll$.pos))[7045:7055, ]
# glbFeatsDerive[["Image.pxl.1.dgt.1"]] <- list(
# # mapfn = function(Image) { return(cut(as.integer(sapply(Image, function(img) strsplit(img, " ")[[1]][1])),
# # breaks = 5)) }
# mapfn = function(Image) { return(substr(Image, 1, 1)) }
# , args = c("Image"))
# Add logs of numerics that are not distributed normally
# Derive & keep multiple transformations of the same feature, if normality is hard to achieve with just one transformation
# Right skew: logp1; sqrt; ^ 1/3; logp1(logp1); log10; exp(-<feat>/constant)
# glbFeatsDerive[["WordCount.log1p"]] <- list(
# mapfn = function(WordCount) { return(log1p(WordCount)) }
# , args = c("WordCount"))
# glbFeatsDerive[["WordCount.root2"]] <- list(
# mapfn = function(WordCount) { return(WordCount ^ (1/2)) }
# , args = c("WordCount"))
# glbFeatsDerive[["WordCount.nexp"]] <- list(
# mapfn = function(WordCount) { return(exp(-WordCount)) }
# , args = c("WordCount"))
#print(summary(glbObsAll$WordCount))
#print(summary(mapfn(glbObsAll$WordCount)))
# mapfn = function(HOSPI.COST) { return(cut(HOSPI.COST, breaks = c(0, 100000, 200000, 300000, 900000), labels = NULL)) }
# mapfn = function(Rasmussen) { return(ifelse(sign(Rasmussen) >= 0, 1, 0)) }
# mapfn = function(startprice) { return(startprice ^ (1/2)) }
# mapfn = function(startprice) { return(log(startprice)) }
# mapfn = function(startprice) { return(exp(-startprice / 20)) }
# mapfn = function(startprice) { return(scale(log(startprice))) }
# mapfn = function(startprice) { return(sign(sprice.predict.diff) * (abs(sprice.predict.diff) ^ (1/10))) }
# factor
glbFeatsDerive[["P.cor.cut.fctr"]] <- list(
mapfn = function(P.cor) { return(cut(P.cor, breaks = c(-1, 0, 0.5, 0.7, 1))) }
, args = c("P.cor"))
glbFeatsDerive[["P.mnkSml.1.scld"]] <- list(
mapfn = function(P.mnkSml.1) { return(P.mnkSml.1 * 10000) }
, args = c("P.mnkSml.1"))
glbFeatsDerive[["P.mnkSml.1.scld.cut.fctr"]] <- list(
mapfn = function(P.mnkSml.1.scld) { return(cut(P.mnkSml.1.scld, breaks = c(0.00, 0.47, 0.64, 0.87, 2.90))) }
, args = c("P.mnkSml.1.scld"))
glbFeatsDerive[["P.mnkSml.2.scld"]] <- list(
mapfn = function(P.mnkSml.2) { return(P.mnkSml.2 * 1000) }
, args = c("P.mnkSml.2"))
glbFeatsDerive[["P.mnkSml.2.scld.cut.fctr"]] <- list(
mapfn = function(P.mnkSml.2.scld) { return(cut(P.mnkSml.2.scld, breaks = c(0.00, 0.86, 1.14, 1.48, 4.60))) }
, args = c("P.mnkSml.2.scld"))
glbFeatsDerive[["P.mnkSml.3.scld"]] <- list(
mapfn = function(P.mnkSml.3) { return(P.mnkSml.3 * 100) }
, args = c("P.mnkSml.3"))
glbFeatsDerive[["P.mnkSml.3.scld.cut.fctr"]] <- list(
mapfn = function(P.mnkSml.3.scld) { return(cut(P.mnkSml.3.scld, breaks = c(0.00, 0.21, 0.27, 0.35, 1.10))) }
, args = c("P.mnkSml.3.scld"))
glbFeatsDerive[["P.cosSml.cut.fctr"]] <- list(
mapfn = function(P.cosSml) { return(cut(P.cosSml, breaks = c(0.00, 0.95, 0.97, 0.98, 1.00))) }
, args = c("P.cosSml"))
# mapfn = function(PropR) { return(as.factor(ifelse(PropR >= 0.5, "Y", "N"))) }
# mapfn = function(productline, description) { as.factor(gsub(" ", "", productline)) }
# mapfn = function(purpose) { return(relevel(as.factor(purpose), ref="all_other")) }
# mapfn = function(raw) { tfr_raw <- as.character(cut(raw, 5));
# tfr_raw[is.na(tfr_raw)] <- "NA.my";
# return(as.factor(tfr_raw)) }
# mapfn = function(startprice.log10) { return(cut(startprice.log10, 3)) }
# mapfn = function(startprice.log10) { return(cut(sprice.predict.diff, c(-1000, -100, -10, -1, 0, 1, 10, 100, 1000))) }
# , args = c("<arg1>"))
# multiple args
# mapfn = function(id, date) { return(paste(as.character(id), as.character(date), sep = "#")) }
# mapfn = function(PTS, oppPTS) { return(PTS - oppPTS) }
# mapfn = function(startprice.log10.predict, startprice) {
# return(spdiff <- (10 ^ startprice.log10.predict) - startprice) }
# mapfn = function(productline, description) { as.factor(
# paste(gsub(" ", "", productline), as.numeric(nchar(description) > 0), sep = "*")) }
# mapfn = function(.src, .pos) {
# return(paste(.src, sprintf("%04d",
# ifelse(.src == "Train", .pos, .pos - 7049)
# ), sep = "#")) }
# # If glbObsAll is not sorted in the desired manner
# mapfn=function(Week) { return(coredata(lag(zoo(orderBy(~Week, glbObsAll)$ILI), -2, na.pad=TRUE))) }
# mapfn=function(ILI) { return(coredata(lag(zoo(ILI), -2, na.pad=TRUE))) }
# mapfn=function(ILI.2.lag) { return(log(ILI.2.lag)) }
# glbFeatsDerive[["<var1>"]] <- glbFeatsDerive[["<var2>"]]
glb_derive_vars <- names(glbFeatsDerive)
# tst <- "descr.my"; args_lst <- NULL; for (arg in glbFeatsDerive[[tst]]$args) args_lst[[arg]] <- glbObsAll[, arg]; print(head(args_lst[[arg]])); print(head(drv_vals <- do.call(glbFeatsDerive[[tst]]$mapfn, args_lst)));
# print(which_ix <- which(args_lst[[arg]] == 0.75)); print(drv_vals[which_ix]);
glbFeatsDateTime <- list()
# glbFeatsDateTime[["<DateTimeFeat>"]] <-
# c(format = "%Y-%m-%d %H:%M:%S", timezone = "America/New_York", impute.na = TRUE,
# last.ctg = TRUE, poly.ctg = TRUE)
glbFeatsPrice <- NULL # or c("<price_var>")
glbFeatsImage <- list() #list(Image = list(patchSize = 10)) # if patchSize not specified, no patch computation
glbFeatsText <- list()
Sys.setlocale("LC_ALL", "C") # For English
## [1] "C/C/C/C/C/en_US.UTF-8"
#glbFeatsText[["<TextFeature>"]] <- list(NULL,
# ,names = myreplacePunctuation(str_to_lower(gsub(" ", "", c(NULL,
# <comma-separated-screened-names>
# ))))
# ,rareWords = myreplacePunctuation(str_to_lower(gsub(" ", "", c(NULL,
# <comma-separated-nonSCOWL-words>
# ))))
#)
# Text Processing Step: custom modifications not present in txt_munge -> use glbFeatsDerive
# Text Processing Step: universal modifications
glb_txt_munge_filenames_pfx <- "<projectId>_mytxt_"
# Text Processing Step: tolower
# Text Processing Step: myreplacePunctuation
# Text Processing Step: removeWords
glb_txt_stop_words <- list()
# Remember to use unstemmed words
if (length(glbFeatsText) > 0) {
require(tm)
require(stringr)
glb_txt_stop_words[["<txt_var>"]] <- sort(myreplacePunctuation(str_to_lower(gsub(" ", "", c(NULL
# Remove any words from stopwords
# , setdiff(myreplacePunctuation(stopwords("english")), c("<keep_wrd1>", "<keep_wrd2>"))
# Remove salutations
,"mr","mrs","dr","Rev"
# Remove misc
#,"th" # Happy [[:digit::]]+th birthday
# Remove terms present in Trn only or New only; search for "Partition post-stem"
# ,<comma-separated-terms>
# cor.y.train == NA
# ,unlist(strsplit(paste(c(NULL
# ,"<comma-separated-terms>"
# ), collapse=",")
# freq == 1; keep c("<comma-separated-terms-to-keep>")
# ,<comma-separated-terms>
# chisq.pval high (e.g. == 1); keep c("<comma-separated-terms-to-keep>")
# ,<comma-separated-terms>
# nzv.freqRatio high (e.g. >= glbFeatsNzvFreqMax); keep c("<comma-separated-terms-to-keep>")
# ,<comma-separated-terms>
)))))
}
#orderBy(~term, glb_post_stem_words_terms_df_lst[[txtFeat]][grep("^man", glb_post_stem_words_terms_df_lst[[txtFeat]]$term), ])
#glbObsAll[glb_post_stem_words_terms_mtrx_lst[[txtFeat]][, 4866] > 0, c(glb_rsp_var, txtFeat)]
# To identify terms with a specific freq
#paste0(sort(subset(glb_post_stop_words_terms_df_lst[[txtFeat]], freq == 1)$term), collapse = ",")
#paste0(sort(subset(glb_post_stem_words_terms_df_lst[[txtFeat]], freq <= 2)$term), collapse = ",")
#subset(glb_post_stem_words_terms_df_lst[[txtFeat]], term %in% c("zinger"))
# To identify terms with a specific freq &
# are not stemmed together later OR is value of color.fctr (e.g. gold)
#paste0(sort(subset(glb_post_stop_words_terms_df_lst[[txtFeat]], (freq == 1) & !(term %in% c("blacked","blemish","blocked","blocks","buying","cables","careful","carefully","changed","changing","chargers","cleanly","cleared","connect","connects","connected","contains","cosmetics","default","defaulting","defective","definitely","describe","described","devices","displays","drop","drops","engravement","excellant","excellently","feels","fix","flawlessly","frame","framing","gentle","gold","guarantee","guarantees","handled","handling","having","install","iphone","iphones","keeped","keeps","known","lights","line","lining","liquid","liquidation","looking","lots","manuals","manufacture","minis","most","mostly","network","networks","noted","opening","operated","performance","performs","person","personalized","photograph","physically","placed","places","powering","pre","previously","products","protection","purchasing","returned","rotate","rotation","running","sales","second","seconds","shipped","shuts","sides","skin","skinned","sticker","storing","thats","theres","touching","unusable","update","updates","upgrade","weeks","wrapped","verified","verify") ))$term), collapse = ",")
#print(subset(glb_post_stem_words_terms_df_lst[[txtFeat]], (freq <= 2)))
#glbObsAll[which(terms_mtrx[, 229] > 0), glbFeatsText]
# To identify terms with cor.y == NA
#orderBy(~-freq+term, subset(glb_post_stop_words_terms_df_lst[[txtFeat]], is.na(cor.y)))
#paste(sort(subset(glb_post_stop_words_terms_df_lst[[txtFeat]], is.na(cor.y))[, "term"]), collapse=",")
#orderBy(~-freq+term, subset(glb_post_stem_words_terms_df_lst[[txtFeat]], is.na(cor.y)))
# To identify terms with low cor.y.abs
#head(orderBy(~cor.y.abs+freq+term, subset(glb_post_stem_words_terms_df_lst[[txtFeat]], !is.na(cor.y))), 5)
# To identify terms with high chisq.pval
#subset(glb_post_stem_words_terms_df_lst[[txtFeat]], chisq.pval > 0.99)
#paste0(sort(subset(glb_post_stem_words_terms_df_lst[[txtFeat]], (chisq.pval > 0.99) & (freq <= 10))$term), collapse=",")
#paste0(sort(subset(glb_post_stem_words_terms_df_lst[[txtFeat]], (chisq.pval > 0.9))$term), collapse=",")
#head(orderBy(~-chisq.pval+freq+term, glb_post_stem_words_terms_df_lst[[txtFeat]]), 5)
#glbObsAll[glb_post_stem_words_terms_mtrx_lst[[txtFeat]][, 68] > 0, glbFeatsText]
#orderBy(~term, glb_post_stem_words_terms_df_lst[[txtFeat]][grep("^m", glb_post_stem_words_terms_df_lst[[txtFeat]]$term), ])
# To identify terms with high nzv.freqRatio
#summary(glb_post_stem_words_terms_df_lst[[txtFeat]]$nzv.freqRatio)
#paste0(sort(setdiff(subset(glb_post_stem_words_terms_df_lst[[txtFeat]], (nzv.freqRatio >= glbFeatsNzvFreqMax) & (freq < 10) & (chisq.pval >= 0.05))$term, c( "128gb","3g","4g","gold","ipad1","ipad3","ipad4","ipadair2","ipadmini2","manufactur","spacegray","sprint","tmobil","verizon","wifion"))), collapse=",")
# To identify obs with a txt term
#tail(orderBy(~-freq+term, glb_post_stop_words_terms_df_lst[[txtFeat]]), 20)
#mydspObs(list(descr.my.contains="non"), cols=c("color", "carrier", "cellular", "storage"))
#grep("ever", dimnames(terms_stop_mtrx)$Terms)
#which(terms_stop_mtrx[, grep("ipad", dimnames(terms_stop_mtrx)$Terms)] > 0)
#glbObsAll[which(terms_stop_mtrx[, grep("16", dimnames(terms_stop_mtrx)$Terms)[1]] > 0), c(glbFeatsCategory, "storage", txtFeat)]
# Text Processing Step: screen for names # Move to glbFeatsText specs section in order of text processing steps
# glbFeatsText[["<txtFeat>"]]$names <- myreplacePunctuation(str_to_lower(gsub(" ", "", c(NULL
# # Person names for names screening
# ,<comma-separated-list>
#
# # Company names
# ,<comma-separated-list>
#
# # Product names
# ,<comma-separated-list>
# ))))
# glbFeatsText[["<txtFeat>"]]$rareWords <- myreplacePunctuation(str_to_lower(gsub(" ", "", c(NULL
# # Words not in SCOWL db
# ,<comma-separated-list>
# ))))
# To identify char vectors post glbFeatsTextMap
#grep("six(.*)hour", glb_txt_chr_lst[[txtFeat]], ignore.case = TRUE, value = TRUE)
#grep("[S|s]ix(.*)[H|h]our", glb_txt_chr_lst[[txtFeat]], value = TRUE)
# To identify whether terms shd be synonyms
#orderBy(~term, glb_post_stop_words_terms_df_lst[[txtFeat]][grep("^moder", glb_post_stop_words_terms_df_lst[[txtFeat]]$term), ])
# term_row_df <- glb_post_stop_words_terms_df_lst[[txtFeat]][grep("^came$", glb_post_stop_words_terms_df_lst[[txtFeat]]$term), ]
#
# cor(glb_post_stop_words_terms_mtrx_lst[[txtFeat]][glbObsAll$.lcn == "Fit", term_row_df$pos], glbObsTrn[, glb_rsp_var], use="pairwise.complete.obs")
# To identify which stopped words are "close" to a txt term
#sort(cluster_vars)
# Text Processing Step: stemDocument
# To identify stemmed txt terms
#glb_post_stop_words_terms_df_lst[[txtFeat]][grep("^la$", glb_post_stop_words_terms_df_lst[[txtFeat]]$term), ]
#orderBy(~term, glb_post_stem_words_terms_df_lst[[txtFeat]][grep("^con", glb_post_stem_words_terms_df_lst[[txtFeat]]$term), ])
#glbObsAll[which(terms_stem_mtrx[, grep("use", dimnames(terms_stem_mtrx)$Terms)[[1]]] > 0), c(glbFeatsId, "productline", txtFeat)]
#glbObsAll[which(TfIdf_stem_mtrx[, 191] > 0), c(glbFeatsId, glbFeatsCategory, txtFeat)]
#glbObsAll[which(glb_post_stop_words_terms_mtrx_lst[[txtFeat]][, 6165] > 0), c(glbFeatsId, glbFeatsCategory, txtFeat)]
#which(glbObsAll$UniqueID %in% c(11915, 11926, 12198))
# Text Processing Step: mycombineSynonyms
# To identify which terms are associated with not -> combine "could not" & "couldn't"
#findAssocs(glb_full_DTM_lst[[txtFeat]], "not", 0.05)
# To identify which synonyms should be combined
#orderBy(~term, glb_post_stem_words_terms_df_lst[[txtFeat]][grep("^c", glb_post_stem_words_terms_df_lst[[txtFeat]]$term), ])
chk_comb_cor <- function(syn_lst) {
# cor(terms_stem_mtrx[glbObsAll$.src == "Train", grep("^(damag|dent|ding)$", dimnames(terms_stem_mtrx)[[2]])], glbObsTrn[, glb_rsp_var], use="pairwise.complete.obs")
print(subset(glb_post_stem_words_terms_df_lst[[txtFeat]], term %in% syn_lst$syns))
print(subset(get_corpus_terms(tm_map(glbFeatsTextCorpus[[txtFeat]], mycombineSynonyms, list(syn_lst), lazy=FALSE)), term == syn_lst$word))
# cor(terms_stop_mtrx[glbObsAll$.src == "Train", grep("^(damage|dent|ding)$", dimnames(terms_stop_mtrx)[[2]])], glbObsTrn[, glb_rsp_var], use="pairwise.complete.obs")
# cor(rowSums(terms_stop_mtrx[glbObsAll$.src == "Train", grep("^(damage|dent|ding)$", dimnames(terms_stop_mtrx)[[2]])]), glbObsTrn[, glb_rsp_var], use="pairwise.complete.obs")
}
#chk_comb_cor(syn_lst=list(word="cabl", syns=c("cabl", "cord")))
#chk_comb_cor(syn_lst=list(word="damag", syns=c("damag", "dent", "ding")))
#chk_comb_cor(syn_lst=list(word="dent", syns=c("dent", "ding")))
#chk_comb_cor(syn_lst=list(word="use", syns=c("use", "usag")))
glbFeatsTextSynonyms <- list()
# list parsed to collect glbFeatsText[[<txtFeat>]]$vldTerms
# glbFeatsTextSynonyms[["Hdln.my"]] <- list(NULL
# # people in places
# , list(word = "australia", syns = c("australia", "australian"))
# , list(word = "italy", syns = c("italy", "Italian"))
# , list(word = "newyork", syns = c("newyork", "newyorker"))
# , list(word = "Pakistan", syns = c("Pakistan", "Pakistani"))
# , list(word = "peru", syns = c("peru", "peruvian"))
# , list(word = "qatar", syns = c("qatar", "qatari"))
# , list(word = "scotland", syns = c("scotland", "scotish"))
# , list(word = "Shanghai", syns = c("Shanghai", "Shanzhai"))
# , list(word = "venezuela", syns = c("venezuela", "venezuelan"))
#
# # companies - needs to be data dependent
# # - e.g. ensure BNP in this experiment/feat always refers to BNPParibas
#
# # general synonyms
# , list(word = "Create", syns = c("Create","Creator"))
# , list(word = "cute", syns = c("cute","cutest"))
# , list(word = "Disappear", syns = c("Disappear","Fadeout"))
# , list(word = "teach", syns = c("teach", "taught"))
# , list(word = "theater", syns = c("theater", "theatre", "theatres"))
# , list(word = "understand", syns = c("understand", "understood"))
# , list(word = "weak", syns = c("weak", "weaken", "weaker", "weakest"))
# , list(word = "wealth", syns = c("wealth", "wealthi"))
#
# # custom synonyms (phrases)
#
# # custom synonyms (names)
# )
#glbFeatsTextSynonyms[["<txtFeat>"]] <- list(NULL
# , list(word="<stem1>", syns=c("<stem1>", "<stem1_2>"))
# )
for (txtFeat in names(glbFeatsTextSynonyms))
for (entryIx in seq_along(glbFeatsTextSynonyms[[txtFeat]])) {
glbFeatsTextSynonyms[[txtFeat]][[entryIx]]$word <-
str_to_lower(glbFeatsTextSynonyms[[txtFeat]][[entryIx]]$word)
glbFeatsTextSynonyms[[txtFeat]][[entryIx]]$syns <-
str_to_lower(glbFeatsTextSynonyms[[txtFeat]][[entryIx]]$syns)
}
glbFeatsTextSeed <- 181
# tm options include: check tm::weightSMART
glb_txt_terms_control <- list( # Gather model performance & run-time stats
# weighting = function(x) weightSMART(x, spec = "nnn")
# weighting = function(x) weightSMART(x, spec = "lnn")
# weighting = function(x) weightSMART(x, spec = "ann")
# weighting = function(x) weightSMART(x, spec = "bnn")
# weighting = function(x) weightSMART(x, spec = "Lnn")
#
weighting = function(x) weightSMART(x, spec = "ltn") # default
# weighting = function(x) weightSMART(x, spec = "lpn")
#
# weighting = function(x) weightSMART(x, spec = "ltc")
#
# weighting = weightBin
# weighting = weightTf
# weighting = weightTfIdf # : default
# termFreq selection criteria across obs: tm default: list(global=c(1, Inf))
, bounds = list(global = c(1, Inf))
# wordLengths selection criteria: tm default: c(3, Inf)
, wordLengths = c(1, Inf)
)
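# For reference, tm::weightSMART specs are <tf><df><normalization> letters;
# "ltn" above = logarithmic term frequency, idf document frequency ("t"),
# no normalization ("n")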
glb_txt_cor_var <- glb_rsp_var # : default # or c(<feat>)
# select one from c("union.top.val.cor", "top.cor", "top.val", default: "top.chisq", "sparse")
glbFeatsTextFilter <- "top.chisq"
glbFeatsTextTermsMax <- rep(10, length(glbFeatsText)) # :default
names(glbFeatsTextTermsMax) <- names(glbFeatsText)
# Text Processing Step: extractAssoc
glbFeatsTextAssocCor <- rep(1, length(glbFeatsText)) # :default
names(glbFeatsTextAssocCor) <- names(glbFeatsText)
# Remember to use stemmed terms
glb_important_terms <- list()
# Text Processing Step: extractPatterns (ngrams)
glbFeatsTextPatterns <- list()
#glbFeatsTextPatterns[["<txtFeat>"]] <- list()
#glbFeatsTextPatterns[["<txtFeat>"]] <- c(metropolitan.diary.colon = "Metropolitan Diary:")
# Have to set it even if it is not used
# Properties:
# numrows(glb_feats_df) << numrows(glbObsFit)
# Select terms that appear in at least 0.2 * O(FP/FN(glbObsOOB)) ???
# numrows(glbObsOOB) = 1.1 * numrows(glbObsNew) ???
glb_sprs_thresholds <- NULL # or c(<txtFeat1> = 0.988, <txtFeat2> = 0.970, <txtFeat3> = 0.970)
glbFctrMaxUniqVals <- 20 # default: 20
glb_impute_na_data <- FALSE # or TRUE
glb_mice_complete.seed <- 144 # or any integer
glb_cluster <- FALSE # : default or TRUE
glb_cluster.seed <- 189 # or any integer
glb_cluster_entropy_var <- NULL # c(glb_rsp_var, as.factor(cut(glb_rsp_var, 3)), default: NULL)
glbFeatsTextClusterVarsExclude <- FALSE # default FALSE
glb_interaction_only_feats <- NULL # : default or c(<parent_feat> = "<child_feat>")
glbFeatsNzvFreqMax <- 19 # 19 : caret default
glbFeatsNzvUniqMin <- 10 # 10 : caret default
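# These are caret::nearZeroVar() defaults: a feature is flagged when the ratio
# of its most- to second-most-frequent value >= freqCut (19, i.e. 95/5) AND the
# percentage of unique values <= uniqueCut (10)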
glbRFESizes <- list()
#glbRFESizes[["mdlFamily"]] <- c(4, 8, 16, 32, 64, 67, 68, 69) # Accuracy@69/70 = 0.8258
glbObsFitOutliers <- list()
# If outliers.n >= 10; consider concatenation of interaction vars
# glbObsFitOutliers[["<mdlFamily>"]] <- c(NULL
# is.na(.rstudent)
# is.na(.dffits)
# .hatvalues >= 0.99
# -38,167,642 < minmax(.rstudent) < 49,649,823
# , <comma-separated-<glbFeatsId>>
# )
glbObsFitOutliers[["All.X"]] <- c(NULL
# is.na(.rstudent)
# max(.rstudent)
# ,"Train#1456#72#28"
# is.na(.dffits)
# .hatvalues >= 0.99
# -38,167,642 < minmax(.rstudent) < 49,649,823
# , <comma-separated-<glbFeatsId>>
)
glbObsTrnOutliers <- list()
glbObsTrnOutliers[["Final"]] <- union(glbObsFitOutliers[["All.X"]],
c(NULL
))
# influence.measures: car::outlier; rstudent; dffits; hatvalues; dfbeta; dfbetas
#mdlId <- "All.X##rcv#glm"; obs_df <- fitobs_df
#mdlId <- "RFE.X.glm"; obs_df <- fitobs_df
#mdlId <- "Final.glm"; obs_df <- trnobs_df
#mdlId <- "CSM2.X.glm"; obs_df <- fitobs_df
#print(outliers <- car::outlierTest(glb_models_lst[[mdlId]]$finalModel))
#mdlIdFamily <- paste0(head(unlist(str_split(mdlId, "\\.")), -1), collapse="."); obs_df <- dplyr::filter_(obs_df, interp(~(!(var %in% glbObsFitOutliers[[mdlIdFamily]])), var = as.name(glbFeatsId))); model_diags_df <- cbind(obs_df, data.frame(.rstudent=stats::rstudent(glb_models_lst[[mdlId]]$finalModel)), data.frame(.dffits=stats::dffits(glb_models_lst[[mdlId]]$finalModel)), data.frame(.hatvalues=stats::hatvalues(glb_models_lst[[mdlId]]$finalModel)));print(summary(model_diags_df[, c(".rstudent",".dffits",".hatvalues")])); table(cut(model_diags_df$.hatvalues, breaks=c(0.00, 0.98, 0.99, 1.00)))
#print(subset(model_diags_df, is.na(.rstudent))[, glbFeatsId])
#print(model_diags_df[which.max(model_diags_df$.rstudent), ])
#print(subset(model_diags_df, is.na(.dffits))[, glbFeatsId])
#print(model_diags_df[which.min(model_diags_df$.dffits), ])
#print(subset(model_diags_df, .hatvalues > 0.99)[, glbFeatsId])
#dffits_df <- merge(dffits_df, outliers_df, by="row.names", all.x=TRUE); row.names(dffits_df) <- dffits_df$Row.names; dffits_df <- subset(dffits_df, select=-Row.names)
#dffits_df <- merge(dffits_df, glbObsFit, by="row.names", all.x=TRUE); row.names(dffits_df) <- dffits_df$Row.names; dffits_df <- subset(dffits_df, select=-Row.names)
#subset(dffits_df, !is.na(.Bonf.p))
#mdlId <- "CSM.X.glm"; vars <- myextract_actual_feats(row.names(orderBy(reformulate(c("-", paste0(mdlId, ".imp"))), myget_feats_imp(glb_models_lst[[mdlId]]))));
#model_diags_df <- glb_get_predictions(model_diags_df, mdlId, glb_rsp_var)
#obs_ix <- row.names(model_diags_df) %in% names(outliers$rstudent)[1]
#obs_ix <- which(is.na(model_diags_df$.rstudent))
#obs_ix <- which(is.na(model_diags_df$.dffits))
#myplot_parcoord(obs_df=model_diags_df[, c(glbFeatsId, glbFeatsCategory, ".rstudent", ".dffits", ".hatvalues", glb_rsp_var, paste0(glb_rsp_var, mdlId), vars[1:min(20, length(vars))])], obs_ix=obs_ix, id_var=glbFeatsId, category_var=glbFeatsCategory)
#model_diags_df[row.names(model_diags_df) %in% names(outliers$rstudent)[c(1:2)], ]
#ctgry_diags_df <- model_diags_df[model_diags_df[, glbFeatsCategory] %in% c("Unknown#0"), ]
#myplot_parcoord(obs_df=ctgry_diags_df[, c(glbFeatsId, glbFeatsCategory, ".rstudent", ".dffits", ".hatvalues", glb_rsp_var, "startprice.log10.predict.RFE.X.glmnet", indep_vars[1:20])], obs_ix=row.names(ctgry_diags_df) %in% names(outliers$rstudent)[1], id_var=glbFeatsId, category_var=glbFeatsCategory)
#table(glbObsFit[model_diags_df[, glbFeatsCategory] %in% c("iPad1#1"), "startprice.log10.cut.fctr"])
#glbObsFit[model_diags_df[, glbFeatsCategory] %in% c("iPad1#1"), c(glbFeatsId, "startprice")]
# No outliers & .dffits == NaN
#myplot_parcoord(obs_df=model_diags_df[, c(glbFeatsId, glbFeatsCategory, glb_rsp_var, "startprice.log10.predict.RFE.X.glmnet", indep_vars[1:10])], obs_ix=seq(1:nrow(model_diags_df))[is.na(model_diags_df$.dffits)], id_var=glbFeatsId, category_var=glbFeatsCategory)
# Modify mdlId to (build & extract) "<FamilyId>#<Fit|Trn>#<caretMethod>#<preProc1.preProc2>#<samplingMethod>"
glb_models_lst <- list(); glb_models_df <- data.frame()
# Regression
if (glb_is_regression) {
glbMdlMethods <- c(NULL
# deterministic
#, "lm", # same as glm
, "glm", "bayesglm", "glmnet"
, "rpart"
# non-deterministic
, "gbm", "rf"
# Unknown
, "nnet" , "avNNet" # runs 25 models per cv sample for tunelength=5
, "svmLinear", "svmLinear2"
, "svmPoly" # runs 75 models per cv sample for tunelength=5
, "svmRadial"
, "earth"
, "bagEarth" # Takes a long time
)
} else
# Classification - Add ada (auto feature selection)
if (glb_is_binomial)
glbMdlMethods <- c(NULL
# deterministic
, "bagEarth" # Takes a long time
, "glm", "bayesglm", "glmnet"
, "nnet"
, "rpart"
# non-deterministic
, "gbm"
, "avNNet" # runs 25 models per cv sample for tunelength=5
, "rf"
# Unknown
, "lda", "lda2"
# svm models crash when predict() is called -> internally, kernlab should call predict without .outcome
, "svmLinear", "svmLinear2"
, "svmPoly" # runs 75 models per cv sample for tunelength=5
, "svmRadial"
, "earth"
) else
glbMdlMethods <- c(NULL
# deterministic
,"glmnet"
# non-deterministic
,"rf"
# Unknown
,"gbm","rpart"
)
glbMdlFamilies <- list(); glb_mdl_feats_lst <- list()
# family: Choose from c("RFE.X", "CSM.X", "All.X", "Best.Interact")
# methods: Choose from c(NULL, <method>, glbMdlMethods)
#glbMdlFamilies[["RFE.X"]] <- c("glmnet", "glm") # non-NULL vector is mandatory
# glbMdlFamilies[["All.X"]] <- c("glmnet", "glm") # non-NULL vector is mandatory
glbMdlFamilies[["All.X"]] <- c("glmnet") # non-NULL vector is mandatory
#glbMdlFamilies[["Best.Interact"]] <- "glmnet" # non-NULL vector is mandatory
# Check if interaction features make RFE better
# glbMdlFamilies[["CSM.X"]] <- setdiff(glbMdlMethods, c("lda", "lda2")) # crashing due to category:.clusterid ??? #c("glmnet", "glm") # non-NULL list is mandatory
# glb_mdl_feats_lst[["CSM.X"]] <- c(NULL
# , <comma-separated-features-vector>
# )
# dAFeats.CSM.X %<d-% c(NULL
# # Interaction feats up to varImp(RFE.X.glmnet) >= 50
# , <comma-separated-features-vector>
# , setdiff(myextract_actual_feats(predictors(rfe_fit_results)), c(NULL
# , <comma-separated-features-vector>
# ))
# )
# glb_mdl_feats_lst[["CSM.X"]] <- "%<d-% dAFeats.CSM.X"
glbMdlFamilies[["Final"]] <- c("glmnet", "glm") # c(NULL) # NULL vector acceptable
glbMdlAllowParallel <- list()
#glbMdlAllowParallel[["<mdlId>"]] <- FALSE
glbMdlAllowParallel[["Max.cor.Y##rcv#rpart"]] <- FALSE
glbMdlAllowParallel[["Interact.High.cor.Y##rcv#glmnet"]] <- FALSE
glbMdlAllowParallel[["Low.cor.X##rcv#glmnet"]] <- FALSE
glbMdlAllowParallel[["All.X##rcv#glmnet"]] <- FALSE
# glbMdlAllowParallel[["All.X##rcv#glm"]] <- FALSE
glbMdlAllowParallel[["Final##rcv#glmnet"]] <- FALSE
glbMdlAllowParallel[["Final##rcv#glm"]] <- FALSE
# Check if tuning parameters make fit better; make it mdlFamily customizable?
glbMdlTuneParams <- data.frame()
# When glmnet crashes at model$grid with error: ???
glmnetTuneParams <- rbind(data.frame()
,data.frame(parameter = "alpha", vals = "0.100 0.325 0.550 0.775 1.000")
,data.frame(parameter = "lambda", vals = "9.342e-02")
)
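# The vals strings above are presumably parsed by the model-build wrappers in
# mydsutils.R into an expand.grid(alpha = ..., lambda = ...) tuneGrid;
# caret::train itself expects a numeric grid, not strings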
# glbMdlTuneParams <- myrbind_df(glbMdlTuneParams,
# cbind(data.frame(mdlId = "<mdlId>"),
# glmnetTuneParams))
#avNNet
# size=[1] 3 5 7 9; decay=[0] 1e-04 0.001 0.01 0.1; bag=[FALSE]; RMSE=1.3300906
#bagEarth
# degree=1 [2] 3; nprune=64 128 256 512 [1024]; RMSE=0.6486663 (up)
# glbMdlTuneParams <- myrbind_df(glbMdlTuneParams, rbind(data.frame()
# ,data.frame(method = "bagEarth", parameter = "nprune", vals = "256")
# ,data.frame(method = "bagEarth", parameter = "degree", vals = "2")
# ))
#earth
# degree=[1]; nprune=2 [9] 17 25 33; RMSE=0.1334478
#gbm
# shrinkage=0.05 [0.10] 0.15 0.20 0.25; n.trees=100 150 200 [250] 300; interaction.depth=[1] 2 3 4 5; n.minobsinnode=[10]; RMSE=0.2008313
# glbMdlTuneParams <- myrbind_df(glbMdlTuneParams, rbind(data.frame()
# ,data.frame(method = "gbm", parameter = "shrinkage", min = 0.05, max = 0.25, by = 0.05)
# ,data.frame(method = "gbm", parameter = "n.trees", min = 100, max = 300, by = 50)
# ,data.frame(method = "gbm", parameter = "interaction.depth", min = 1, max = 5, by = 1)
# ,data.frame(method = "gbm", parameter = "n.minobsinnode", min = 10, max = 10, by = 10)
# #seq(from=0.05, to=0.25, by=0.05)
# ))
#glmnet
# alpha=0.100 [0.325] 0.550 0.775 1.000; lambda=0.0005232693 0.0024288010 0.0112734954 [0.0523269304] 0.2428800957; RMSE=0.6164891
# glbMdlTuneParams <- myrbind_df(glbMdlTuneParams, rbind(data.frame()
# ,data.frame(method = "glmnet", parameter = "alpha", vals = "0.550 0.775 0.8875 0.94375 1.000")
# ,data.frame(method = "glmnet", parameter = "lambda", vals = "9.858855e-05 0.0001971771 0.0009152152 0.0042480525 0.0197177130")
# ))
#nnet
# size=3 5 [7] 9 11; decay=0.0001 0.001 0.01 [0.1] 0.2; RMSE=0.9287422
# glbMdlTuneParams <- myrbind_df(glbMdlTuneParams, rbind(data.frame()
# ,data.frame(method = "nnet", parameter = "size", vals = "3 5 7 9 11")
# ,data.frame(method = "nnet", parameter = "decay", vals = "0.0001 0.0010 0.0100 0.1000 0.2000")
# ))
#rf # Don't bother; results are not deterministic
# mtry=2 35 68 [101] 134; RMSE=0.1339974
# glbMdlTuneParams <- myrbind_df(glbMdlTuneParams, rbind(data.frame()
# ,data.frame(method = "rf", parameter = "mtry", vals = "2 5 9 13 17")
# ))
#rpart
# cp=0.020 [0.025] 0.030 0.035 0.040; RMSE=0.1770237
# glbMdlTuneParams <- myrbind_df(glbMdlTuneParams, rbind(data.frame()
# ,data.frame(method = "rpart", parameter = "cp", vals = "0.004347826 0.008695652 0.017391304 0.021739130 0.034782609")
# ))
#svmLinear
# C=0.01 0.05 [0.10] 0.50 1.00 2.00 3.00 4.00; RMSE=0.1271318; 0.1296718
# glbMdlTuneParams <- myrbind_df(glbMdlTuneParams, rbind(data.frame()
# ,data.frame(method = "svmLinear", parameter = "C", vals = "0.01 0.05 0.1 0.5 1")
# ))
#svmLinear2
# cost=0.0625 0.1250 [0.25] 0.50 1.00; RMSE=0.1276354
# glbMdlTuneParams <- myrbind_df(glbMdlTuneParams, rbind(data.frame()
# ,data.frame(method = "svmLinear2", parameter = "cost", vals = "0.0625 0.125 0.25 0.5 1")
# ))
#svmPoly
# degree=[1] 2 3 4 5; scale=0.01 0.05 [0.1] 0.5 1; C=0.50 1.00 [2.00] 3.00 4.00; RMSE=0.1276130
# glbMdlTuneParams <- myrbind_df(glbMdlTuneParams, rbind(data.frame()
# ,data.frame(method="svmPoly", parameter="degree", min=1, max=5, by=1) #seq(1, 5, 1)
# ,data.frame(method="svmPoly", parameter="scale", vals="0.01, 0.05, 0.1, 0.5, 1")
# ,data.frame(method="svmPoly", parameter="C", vals="0.50, 1.00, 2.00, 3.00, 4.00")
# ))
#svmRadial
# sigma=[0.08674323]; C=0.25 0.50 1.00 [2.00] 4.00; RMSE=0.1614957
#glb2Sav(); all.equal(sav_models_df, glb_models_df)
glb_preproc_methods <- NULL
# c("YeoJohnson", "center.scale", "range", "pca", "ica", "spatialSign")
# Baseline prediction model feature(s)
glb_Baseline_mdl_var <- NULL # or c("<feat>")
glbMdlMetric_terms <- NULL # or matrix(c(
# 0,1,2,3,4,
# 2,0,1,2,3,
# 4,2,0,1,2,
# 6,4,2,0,1,
# 8,6,4,2,0
# ), byrow=TRUE, nrow=5)
glbMdlMetricSummary <- NULL # or "<metric_name>"
glbMdlMetricMaximize <- NULL # or FALSE (TRUE is not the default for both classification & regression)
glbMdlMetricSummaryFn <- NULL # or function(data, lev=NULL, model=NULL) {
# confusion_mtrx <- t(as.matrix(confusionMatrix(data$pred, data$obs)))
# #print(confusion_mtrx)
# #print(confusion_mtrx * glbMdlMetric_terms)
# metric <- sum(confusion_mtrx * glbMdlMetric_terms) / nrow(data)
# names(metric) <- glbMdlMetricSummary
# return(metric)
# }
glbMdlCheckRcv <- FALSE # Turn it on when needed; otherwise it takes a long time
glb_rcv_n_folds <- 3 # or NULL
glb_rcv_n_repeats <- 3 # or NULL
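# These presumably feed caret::trainControl(method = "repeatedcv",
# number = glb_rcv_n_folds, repeats = glb_rcv_n_repeats) inside the
# mydsutils.R model-build wrappers (hence the "rcv" tag in mdlIds)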
glb_clf_proba_threshold <- NULL # 0.5
# Model selection criteria
if (glb_is_regression)
glbMdlMetricsEval <- c("min.RMSE.OOB", "max.R.sq.OOB", "max.Adj.R.sq.fit", "min.RMSE.fit")
#glbMdlMetricsEval <- c("min.RMSE.fit", "max.R.sq.fit", "max.Adj.R.sq.fit")
if (glb_is_classification) {
if (glb_is_binomial)
glbMdlMetricsEval <-
c("max.Accuracy.OOB", "max.AUCROCR.OOB", "max.AUCpROC.OOB", "min.aic.fit", "max.Accuracy.fit") else
glbMdlMetricsEval <- c("max.Accuracy.OOB", "max.Kappa.OOB")
}
# select from NULL [no ensemble models], "auto" [all models better than MFO or Baseline], c(mdl_ids in glb_models_lst) [Typically top-rated models in auto]
glb_mdl_ensemble <- NULL
# "%<d-% setdiff(mygetEnsembleAutoMdlIds(), 'CSM.X.rf')"
# c(<comma-separated-mdlIds>
# )
# Only for classifications; for regressions remove "(.*)\\.prob" from the regex
# tmp_fitobs_df <- glbObsFit[, grep(paste0("^", gsub(".", "\\.", mygetPredictIds$value, fixed = TRUE), "CSM\\.X\\.(.*)\\.prob"), names(glbObsFit), value = TRUE)]; cor_mtrx <- cor(tmp_fitobs_df); cor_vctr <- sort(cor_mtrx[row.names(orderBy(~-Overall, varImp(glb_models_lst[["Ensemble.repeatedcv.glmnet"]])$imp))[1], ]); summary(cor_vctr); cor_vctr
#ntv.glm <- glm(reformulate(indep_vars, glb_rsp_var), family = "binomial", data = glbObsFit)
#step.glm <- step(ntv.glm)
glb_sel_mdl_id <- "All.X##rcv#glmnet" #select from c(NULL, "All.X##rcv#glmnet", "RFE.X##rcv#glmnet", <mdlId>)
glb_fin_mdl_id <- NULL #select from c(NULL, glb_sel_mdl_id)
glb_dsp_cols <- c(".pos", glbFeatsId, glbFeatsCategory, glb_rsp_var
# List critical cols excl. above
)
# Output specs
lclgetfltout_df <- function(obsout_df) {
require(tidyr)
obsout_df <- obsout_df %>%
tidyr::separate("ImageId.x.y", c(".src", ".pos", "x", "y"),
sep = "#", remove = TRUE, extra = "merge")
# mnm prefix stands for max_n_mean
mnmout_df <- obsout_df %>%
dplyr::group_by(.pos) %>%
#dplyr::top_n(1, Probability1) %>% # Score = 3.9426
#dplyr::top_n(2, Probability1) %>% # Score = ???; weighted = 3.94254;
#dplyr::top_n(3, Probability1) %>% # Score = 3.9418; weighted = 3.94169;
dplyr::top_n(4, Probability1) %>% # Score = ???; weighted = 3.94149;
#dplyr::top_n(5, Probability1) %>% # Score = 3.9421; weighted = 3.94178
# dplyr::summarize(xMeanN = mean(as.numeric(x)), yMeanN = mean(as.numeric(y)))
# dplyr::summarize(xMeanN = weighted.mean(as.numeric(x), Probability1), yMeanN = mean(as.numeric(y)))
# dplyr::summarize(xMeanN = weighted.mean(as.numeric(x), c(Probability1, 0.2357323, 0.2336925)), yMeanN = mean(as.numeric(y)))
# dplyr::summarize(xMeanN = weighted.mean(as.numeric(x), c(Probability1)), yMeanN = mean(as.numeric(y)))
dplyr::summarize(xMeanN = weighted.mean(as.numeric(x), c(Probability1)),
yMeanN = weighted.mean(as.numeric(y), c(Probability1)))
maxout_df <- obsout_df %>%
dplyr::group_by(.pos) %>%
dplyr::summarize(maxProb1 = max(Probability1))
fltout_df <- merge(maxout_df, obsout_df,
by.x = c(".pos", "maxProb1"), by.y = c(".pos", "Probability1"),
all.x = TRUE)
fmnout_df <- merge(fltout_df, mnmout_df,
by.x = c(".pos"), by.y = c(".pos"),
all.x = TRUE)
return(fmnout_df)
}
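# Hedged usage sketch with toy rows (a real obsout_df carries the model's
# Probability1 for every candidate (x, y) patch of each image):
# toyout_df <- data.frame(ImageId.x.y = c("Test#0001#64#37", "Test#0001#66#39"),
#                         Probability1 = c(0.2, 0.8))
# str(lclgetfltout_df(toyout_df))
# # -> one row per .pos: the max-probability patch (maxProb1, x, y) merged with
# #    the probability-weighted mean coordinates (xMeanN, yMeanN) of the top-4 patches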
glbObsOut <- list(NULL
# glbFeatsId will be the first output column, by default
,vars = list()
# ,mapFn = function(obsout_df) {
# require(tidyr)
# smpout_df <- read.csv('data/IdLookupTable.csv')
# tmpout_df <- obsout_df %>%
# tidyr::gather(key = FeatureName, value = Location, -ImageId) %>%
# merge(smpout_df[, -4], all.y = TRUE, sort = FALSE) %>%
# select(matches("(RowId|Location)"))
# return(tmpout_df <- orderBy(~RowId, tmpout_df[, c("RowId", "Location")]))
# }
,mapFn = function(obsout_df) {
#sav_obsout_df <- obsout_df; all.equal(sav_obsout_df, obsout_df)
fltout_df <- lclgetfltout_df(obsout_df) %>%
dplyr::mutate(ImageId = as.integer(.pos)) %>%
dplyr::select(ImageId, left_eye_center_x = xMeanN, left_eye_center_y = yMeanN)
smpout_df <- read.csv('data/IdLookupTable.csv')
curout_df <- fltout_df %>%
tidyr::gather(key = FeatureName, value = Location, -ImageId) %>%
merge(smpout_df[, -4], all.y = TRUE, sort = FALSE) %>%
dplyr::select(matches("(RowId|Location)")) %>%
dplyr::filter(!is.na(Location))
prvout_df <- read.csv('Faces_patch_out.csv')
thsout_df <- rbind(curout_df, subset(prvout_df, !(RowId %in% curout_df$RowId)))
return(thsout_df <- orderBy(~RowId, thsout_df[, c("RowId", "Location")]))
}
)
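# mapFn emits the Kaggle submission format (RowId, Location): left_eye_center_x/y
# rows come from this classifier; RowIds it does not cover are back-filled from
# the prior submission Faces_patch_out.csv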
glb_out_obs <- NULL # select from c(NULL (default: "new"), "all", "new", "trn")
if (glb_is_classification && glb_is_binomial) {
glbObsOut$vars[["Probability1"]] <-
"%<d-% glbObsNew[, mygetPredictIds(glb_rsp_var, glb_fin_mdl_id)$prob]"
} else {
glbObsOut$vars[[glbFeatsId]] <-
"%<d-% as.integer(gsub('Test#', '', glbObsNew[, glbFeatsId]))"
glbObsOut$vars[[glb_rsp_var]] <-
"%<d-% glbObsNew[, mygetPredictIds(glb_rsp_var, glb_fin_mdl_id)$value]"
for (outVar in setdiff(glbFeatsExcludeLcl, glb_rsp_var_raw))
glbObsOut$vars[[outVar]] <-
paste0("%<d-% mean(glbObsAll[, \"", outVar, "\"], na.rm = TRUE)")
}
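# The "%<d-%" prefix marks pryr-style delayed assignment, presumably resolved by
# the output wrapper once glbObsNew and the final-model predictions exist.
# Note: the non-binomial branch references glbFeatsExcludeLcl, which is commented
# out above; restore that block before setting glb_is_binomial to FALSE.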
# glbObsOut$vars[[glb_rsp_var_raw]] <- glb_rsp_var_raw
# glbObsOut$vars[[paste0(head(unlist(strsplit(mygetPredictIds$value, "")), -1), collapse = "")]] <-
glbOutStackFnames <- NULL #: default
# c("ebayipads_txt_assoc1_out_bid1_stack.csv") # manual stack
# c("ebayipads_finmdl_bid1_out_nnet_1.csv") # universal stack
glbOut <- list(pfx = "Faces_patch_clssfr_Pcos_")
# lclImageSampleSeed <- 129
glbOutDataVizFname <- NULL # choose from c(NULL, "<projectId>_obsall.csv")
glbChunks <- list(labels = c("set_global_options_wd","set_global_options"
,"import.data","inspect.data","scrub.data","transform.data"
,"extract.features"
,"extract.features.datetime","extract.features.image","extract.features.price"
,"extract.features.text","extract.features.string"
,"extract.features.end"
,"manage.missing.data","cluster.data","partition.data.training","select.features"
,"fit.models_0","fit.models_1","fit.models_2","fit.models_3"
,"fit.data.training_0","fit.data.training_1"
,"predict.data.new"
,"display.session.info"))
# To ensure that all chunks in this script are in glbChunks
if (!is.null(chkChunksLabels <- knitr::all_labels()) && # knitr::all_labels() doesn't work in console runs
!identical(chkChunksLabels, glbChunks$labels)) {
print(sprintf("setdiff(chkChunksLabels, glbChunks$labels): %s",
setdiff(chkChunksLabels, glbChunks$labels)))
print(sprintf("setdiff(glbChunks$labels, chkChunksLabels): %s",
setdiff(glbChunks$labels, chkChunksLabels)))
}
glbChunks[["first"]] <- NULL #default: script will load envir from previous chunk
glbChunks[["last"]] <- "predict.data.new" #"extract.features.end" #NULL #default: script will save envir at end of this chunk
#mysavChunk(glbOut$pfx, glbChunks[["last"]])
# Inspect max OOB FP
#chkObsOOB <- subset(glbObsOOB, !label.fctr.All.X..rcv.glmnet.is.acc)
#chkObsOOBFP <- subset(chkObsOOB, label.fctr.All.X..rcv.glmnet == "left_eye_center") %>% dplyr::mutate(Probability1 = label.fctr.All.X..rcv.glmnet.prob) %>% select(-.src, -.pos, -x, -y) %>% lclgetfltout_df() %>% mutate(obj.distance = (((as.numeric(x) - left_eye_center_x.int) ^ 2) + ((as.numeric(y) - left_eye_center_y.int) ^ 2)) ^ 0.5) %>% dplyr::top_n(5, obj.distance) %>% dplyr::top_n(5, -patch.cor)
#
#newImgObs <- glbObsNew[(glbObsNew$ImageId == "Test#0001"), ]; print(newImgObs[which.max(newImgObs$label.fctr.Final..rcv.glmnet.prob), ])
#OOBImgObs <- glbObsOOB[(glbObsOOB$ImageId == "Train#0003"), ]; print(OOBImgObs[which.max(OOBImgObs$label.fctr.All.X..rcv.glmnet.prob), ])
#load("<scriptName>_extract.features.end.RData", verbose = TRUE)
#mygetImage(which(glbObsAll[, glbFeatsId] == "Train#0003"), names(glbFeatsImage)[1], plot = TRUE, featHighlight = c("left_eye_center_x", "left_eye_center_y"), ovrlHighlight = c(66, 35))
# Depict process
glb_analytics_pn <- petrinet(name = "glb_analytics_pn",
trans_df = data.frame(id = 1:6,
name = c("data.training.all","data.new",
"model.selected","model.final",
"data.training.all.prediction","data.new.prediction"),
x=c( -5,-5,-15,-25,-25,-35),
y=c( -5, 5, 0, 0, -5, 5)
),
places_df=data.frame(id=1:4,
name=c("bgn","fit.data.training.all","predict.data.new","end"),
x=c( -0, -20, -30, -40),
y=c( 0, 0, 0, 0),
M0=c( 3, 0, 0, 0)
),
arcs_df = data.frame(
begin = c("bgn","bgn","bgn",
"data.training.all","model.selected","fit.data.training.all",
"fit.data.training.all","model.final",
"data.new","predict.data.new",
"data.training.all.prediction","data.new.prediction"),
end = c("data.training.all","data.new","model.selected",
"fit.data.training.all","fit.data.training.all","model.final",
"data.training.all.prediction","predict.data.new",
"predict.data.new","data.new.prediction",
"end","end")
))
#print(ggplot.petrinet(glb_analytics_pn))
print(ggplot.petrinet(glb_analytics_pn) + coord_flip())
## Loading required package: grid
glb_analytics_avl_objs <- NULL
glb_chunks_df <- myadd_chunk(NULL, "import.data")
## label step_major step_minor label_minor bgn end elapsed
## 1 import.data 1 0 0 16.238 NA NA
1.0: import data
## [1] "Reading file ./data/Faces_patch_mean_datafix_Train.csv..."
## [1] "dimensions of data in ./data/Faces_patch_mean_datafix_Train.csv: 35,460 rows x 11 cols"
## ImageId left_eye_center_x left_eye_center_y x y P.cor
## 1 Train#0001 66 39 64 37 0.6483014
## 2 Train#0001 66 39 68 37 0.6399592
## 3 Train#0001 66 39 66 39 0.7587717
## 4 Train#0001 66 39 64 41 0.5988636
## 5 Train#0001 66 39 68 41 0.6648811
## 6 Train#0002 64 34 62 32 0.2810694
## P.mnkSml.1 P.mnkSml.2 P.mnkSml.3 P.cosSml label
## 1 1.852771e-04 0.0028980323 0.006402820 0.9904403 .none
## 2 1.736569e-04 0.0028291468 0.006554064 0.9899424 .none
## 3 2.382601e-04 0.0032429995 0.006574647 0.9927952 left_eye_center
## 4 1.581113e-04 0.0023537917 0.004924027 0.9883924 .none
## 5 1.621569e-04 0.0024482976 0.005129307 0.9894574 .none
## 6 5.312606e-05 0.0009243212 0.002292126 0.9345549 .none
## ImageId left_eye_center_x left_eye_center_y x y P.cor
## 1225 Train#0245 63 36 65 38 0.6917165
## 5403 Train#1081 65 38 65 38 0.8293861
## 18058 Train#3565 64 40 64 40 0.5913983
## 25804 Train#5117 68 35 66 33 0.2391597
## 26084 Train#5173 65 36 63 34 0.2737859
## 35095 Train#6976 64 39 66 41 0.5401647
## P.mnkSml.1 P.mnkSml.2 P.mnkSml.3 P.cosSml label
## 1225 8.258482e-05 0.001544157 0.003928615 0.9887422 .none
## 5403 1.062013e-04 0.001873679 0.004589364 0.9879989 left_eye_center
## 18058 1.056212e-04 0.001678453 0.003829667 0.9730767 left_eye_center
## 25804 7.555874e-05 0.001222750 0.002841469 0.9755732 .none
## 26084 7.925310e-05 0.001345655 0.003229550 0.9571063 .none
## 35095 6.598860e-05 0.001210882 0.003024226 0.9573500 .none
## ImageId left_eye_center_x left_eye_center_y x y P.cor
## 35455 Train#7048 70 39 72 41 0.4656726
## 35456 Train#7049 66 43 64 41 0.6383013
## 35457 Train#7049 66 43 68 41 0.6272588
## 35458 Train#7049 66 43 66 43 0.5554197
## 35459 Train#7049 66 43 64 45 0.3192038
## 35460 Train#7049 66 43 68 45 0.3655009
## P.mnkSml.1 P.mnkSml.2 P.mnkSml.3 P.cosSml label
## 35455 8.569173e-05 0.001478852 0.003576139 0.9867620 .none
## 35456 7.970017e-05 0.001411762 0.003469985 0.9752582 .none
## 35457 8.532465e-05 0.001531350 0.003790689 0.9746540 .none
## 35458 7.563257e-05 0.001335411 0.003245203 0.9694483 left_eye_center
## 35459 6.218574e-05 0.001091221 0.002633805 0.9628933 .none
## 35460 6.354763e-05 0.001134183 0.002780978 0.9629498 .none
## 'data.frame': 35460 obs. of 11 variables:
## $ ImageId : chr "Train#0001" "Train#0001" "Train#0001" "Train#0001" ...
## $ left_eye_center_x: int 66 66 66 66 66 64 64 64 64 64 ...
## $ left_eye_center_y: int 39 39 39 39 39 34 34 34 34 34 ...
## $ x : int 64 68 66 64 68 62 66 64 62 66 ...
## $ y : int 37 37 39 41 41 32 32 34 36 36 ...
## $ P.cor : num 0.648 0.64 0.759 0.599 0.665 ...
## $ P.mnkSml.1 : num 0.000185 0.000174 0.000238 0.000158 0.000162 ...
## $ P.mnkSml.2 : num 0.0029 0.00283 0.00324 0.00235 0.00245 ...
## $ P.mnkSml.3 : num 0.0064 0.00655 0.00657 0.00492 0.00513 ...
## $ P.cosSml : num 0.99 0.99 0.993 0.988 0.989 ...
## $ label : chr ".none" ".none" "left_eye_center" ".none" ...
## - attr(*, "comment")= chr "glbObsTrn"
## NULL
## [1] "Reading file ./data/Faces_patch_mean_datafix_Test.csv..."
## [1] "dimensions of data in ./data/Faces_patch_mean_datafix_Test.csv: 44,575 rows x 8 cols"
## ImageId x y P.cor P.mnkSml.1 P.mnkSml.2 P.mnkSml.3 P.cosSml
## 1 Test#0001 63 35 0.1040824 6.595823e-05 0.001072456 0.002555425 0.9494639
## 2 Test#0001 64 35 0.1017222 6.473160e-05 0.001069503 0.002557378 0.9499080
## 3 Test#0001 65 35 0.1197947 6.484565e-05 0.001088734 0.002611326 0.9518335
## 4 Test#0001 66 35 0.1376063 6.586923e-05 0.001117344 0.002685892 0.9536543
## 5 Test#0001 67 35 0.1351653 6.686703e-05 0.001140791 0.002748192 0.9538594
## 6 Test#0001 63 36 0.2754404 6.979465e-05 0.001164562 0.002793114 0.9612893
## ImageId x y P.cor P.mnkSml.1 P.mnkSml.2 P.mnkSml.3
## 51 Test#0003 63 35 0.5789040 4.702010e-05 0.0008732316 0.002205626
## 7977 Test#0320 64 35 0.3240664 5.269959e-05 0.0009861306 0.002495133
## 17301 Test#0693 63 35 0.3389954 4.292743e-05 0.0007900602 0.002001964
## 17438 Test#0698 65 37 0.6513538 6.902011e-05 0.0012276424 0.002994085
## 17940 Test#0718 67 37 0.5298244 6.300699e-05 0.0010807082 0.002590869
## 20615 Test#0825 67 37 0.5576460 6.412791e-05 0.0011303500 0.002777806
## P.cosSml
## 51 0.9365458
## 7977 0.9140959
## 17301 0.9497737
## 17438 0.9661921
## 17940 0.9301561
## 20615 0.9499751
## ImageId x y P.cor P.mnkSml.1 P.mnkSml.2 P.mnkSml.3
## 44570 Test#1783 67 38 0.7463371 4.546165e-05 0.0008758711 0.002267880
## 44571 Test#1783 63 39 0.7556284 4.558669e-05 0.0008763673 0.002260147
## 44572 Test#1783 64 39 0.7587782 4.504997e-05 0.0008691219 0.002251319
## 44573 Test#1783 65 39 0.7587157 4.485618e-05 0.0008664185 0.002247875
## 44574 Test#1783 66 39 0.7639065 4.533850e-05 0.0008751803 0.002268640
## 44575 Test#1783 67 39 0.7673371 4.595391e-05 0.0008860827 0.002292271
## P.cosSml
## 44570 0.9444418
## 44571 0.9420536
## 44572 0.9413962
## 44573 0.9407501
## 44574 0.9418059
## 44575 0.9432346
## 'data.frame': 44575 obs. of 8 variables:
## $ ImageId : chr "Test#0001" "Test#0001" "Test#0001" "Test#0001" ...
## $ x : int 63 64 65 66 67 63 64 65 66 67 ...
## $ y : int 35 35 35 35 35 36 36 36 36 36 ...
## $ P.cor : num 0.104 0.102 0.12 0.138 0.135 ...
## $ P.mnkSml.1: num 6.60e-05 6.47e-05 6.48e-05 6.59e-05 6.69e-05 ...
## $ P.mnkSml.2: num 0.00107 0.00107 0.00109 0.00112 0.00114 ...
## $ P.mnkSml.3: num 0.00256 0.00256 0.00261 0.00269 0.00275 ...
## $ P.cosSml : num 0.949 0.95 0.952 0.954 0.954 ...
## - attr(*, "comment")= chr "glbObsNew"
## NULL
## [1] "Creating new feature: ImageId.x.y..."
## [1] "Creating new feature: .pos..."
## [1] "Creating new feature: P.cor.cut.fctr..."
## [1] "Creating new feature: P.mnkSml.1.scld..."
## [1] "Creating new feature: P.mnkSml.1.scld.cut.fctr..."
## [1] "Creating new feature: P.mnkSml.2.scld..."
## [1] "Creating new feature: P.mnkSml.2.scld.cut.fctr..."
## [1] "Creating new feature: P.mnkSml.3.scld..."
## [1] "Creating new feature: P.mnkSml.3.scld.cut.fctr..."
## [1] "Creating new feature: P.cosSml.cut.fctr..."
## [1] "Partition stats:"
## Loading required package: sqldf
## Loading required package: gsubfn
## Loading required package: proto
## Loading required package: RSQLite
## Loading required package: DBI
## Loading required package: tcltk
## label .src .n
## 1 <NA> Test 44575
## 2 .none Train 28428
## 3 left_eye_center Train 7032
## label .src .n
## 1 <NA> Test 44575
## 2 .none Train 28428
## 3 left_eye_center Train 7032
## .src .n
## 1 Test 44575
## 2 Train 35460
## Loading required package: lazyeval
## Loading required package: gdata
## gdata: read.xls support for 'XLS' (Excel 97-2004) files ENABLED.
##
## gdata: read.xls support for 'XLSX' (Excel 2007+) files ENABLED.
##
## Attaching package: 'gdata'
## The following objects are masked from 'package:dplyr':
##
## combine, first, last
## The following object is masked from 'package:stats':
##
## nobs
## The following object is masked from 'package:utils':
##
## object.size
## [1] "Found 0 duplicates by all features:"
## NULL
## label step_major step_minor label_minor bgn end elapsed
## 1 import.data 1 0 0 16.238 41.444 25.206
## 2 inspect.data 2 0 0 41.445 NA NA
2.0: inspect data
## Warning: Removed 44575 rows containing non-finite values (stat_count).
## Loading required package: reshape2
## label..none label.left_eye_center label.NA
## Test NA NA 44575
## Train 28428 7032 NA
## label..none label.left_eye_center label.NA
## Test NA NA 1
## Train 0.801692 0.198308 NA
## [1] "numeric data missing in glbObsAll: "
## left_eye_center_x left_eye_center_y
## 44875 44875
## [1] "numeric data w/ 0s in glbObsAll: "
## named integer(0)
## [1] "numeric data w/ Infs in glbObsAll: "
## named integer(0)
## [1] "numeric data w/ NaNs in glbObsAll: "
## named integer(0)
## [1] "string data missing in glbObsAll: "
## ImageId label ImageId.x.y
## 0 NA 0
## label label.fctr .n
## 1 <NA> <NA> 44575
## 2 .none .none 28428
## 3 left_eye_center left_eye_center 7032
## Warning: Removed 1 rows containing missing values (position_stack).
## label.fctr..none label.fctr.left_eye_center label.fctr.NA
## Test NA NA 44575
## Train 28428 7032 NA
## label.fctr..none label.fctr.left_eye_center label.fctr.NA
## Test NA NA 1
## Train 0.801692 0.198308 NA
## label step_major step_minor label_minor bgn end elapsed
## 2 inspect.data 2 0 0 41.445 62.165 20.72
## 3 scrub.data 2 1 1 62.165 NA NA
2.1: scrub data
## [1] "numeric data missing in glbObsAll: "
## left_eye_center_x left_eye_center_y label.fctr
## 44875 44875 44575
## [1] "numeric data w/ 0s in glbObsAll: "
## named integer(0)
## [1] "numeric data w/ Infs in glbObsAll: "
## named integer(0)
## [1] "numeric data w/ NaNs in glbObsAll: "
## named integer(0)
## [1] "string data missing in glbObsAll: "
## ImageId label ImageId.x.y
## 0 NA 0
## label step_major step_minor label_minor bgn end elapsed
## 3 scrub.data 2 1 1 62.165 65.892 3.728
## 4 transform.data 2 2 2 65.893 NA NA
2.2: transform data
## label step_major step_minor label_minor bgn end elapsed
## 4 transform.data 2 2 2 65.893 65.933 0.04
## 5 extract.features 3 0 0 65.933 NA NA
3.0: extract features
## label step_major step_minor label_minor bgn
## 5 extract.features 3 0 0 65.933
## 6 extract.features.datetime 3 1 1 65.953
## end elapsed
## 5 65.953 0.02
## 6 NA NA
3.1: extract features datetime
## label step_major step_minor label_minor bgn
## 1 extract.features.datetime.bgn 1 0 0 65.977
## end elapsed
## 1 NA NA
## label step_major step_minor label_minor bgn
## 6 extract.features.datetime 3 1 1 65.953
## 7 extract.features.image 3 2 2 65.987
## end elapsed
## 6 65.986 0.033
## 7 NA NA
3.2: extract features image
## label step_major step_minor label_minor bgn end
## 1 extract.features.image.bgn 1 0 0 66.02 NA
## elapsed
## 1 NA
## label step_major step_minor label_minor bgn
## 1 extract.features.image.bgn 1 0 0 66.020
## 2 extract.features.image.end 2 0 0 66.029
## end elapsed
## 1 66.028 0.008
## 2 NA NA
## label step_major step_minor label_minor bgn
## 1 extract.features.image.bgn 1 0 0 66.020
## 2 extract.features.image.end 2 0 0 66.029
## end elapsed
## 1 66.028 0.008
## 2 NA NA
## label step_major step_minor label_minor bgn end
## 7 extract.features.image 3 2 2 65.987 66.038
## 8 extract.features.price 3 3 3 66.039 NA
## elapsed
## 7 0.052
## 8 NA
3.3: extract features price
## label step_major step_minor label_minor bgn end
## 1 extract.features.price.bgn 1 0 0 66.065 NA
## elapsed
## 1 NA
## label step_major step_minor label_minor bgn end
## 8 extract.features.price 3 3 3 66.039 66.074
## 9 extract.features.text 3 4 4 66.074 NA
## elapsed
## 8 0.035
## 9 NA
3.4: extract features text
## label step_major step_minor label_minor bgn end
## 1 extract.features.text.bgn 1 0 0 66.121 NA
## elapsed
## 1 NA
## label step_major step_minor label_minor bgn end
## 9 extract.features.text 3 4 4 66.074 66.132
## 10 extract.features.string 3 5 5 66.133 NA
## elapsed
## 9 0.058
## 10 NA
3.5: extract features string
## label step_major step_minor label_minor bgn end
## 1 extract.features.string.bgn 1 0 0 66.171 NA
## elapsed
## 1 NA
## label step_major step_minor
## 1 extract.features.string.bgn 1 0
## 2 extract.features.stringfactorize.str.vars 2 0
## label_minor bgn end elapsed
## 1 0 66.171 66.18 0.009
## 2 0 66.181 NA NA
## ImageId label .src ImageId.x.y
## "ImageId" "label" ".src" "ImageId.x.y"
## label step_major step_minor label_minor bgn end
## 10 extract.features.string 3 5 5 66.133 66.194
## 11 extract.features.end 3 6 6 66.195 NA
## elapsed
## 10 0.061
## 11 NA
3.6: extract features end
## time trans "bgn " "fit.data.training.all " "predict.data.new " "end "
## 0.0000 multiple enabled transitions: data.training.all data.new model.selected firing: data.training.all
## 1.0000 1 2 1 0 0
## 1.0000 multiple enabled transitions: data.training.all data.new model.selected model.final data.training.all.prediction firing: data.new
## 2.0000 2 1 1 1 0
## label step_major step_minor label_minor bgn end
## 11 extract.features.end 3 6 6 66.195 67.071
## 12 manage.missing.data 4 0 0 67.072 NA
## elapsed
## 11 0.877
## 12 NA
4.0: manage missing data
## [1] "numeric data missing in glbObsAll: "
## left_eye_center_x left_eye_center_y label.fctr
## 44875 44875 44575
## [1] "numeric data w/ 0s in glbObsAll: "
## named integer(0)
## [1] "numeric data w/ Infs in glbObsAll: "
## named integer(0)
## [1] "numeric data w/ NaNs in glbObsAll: "
## named integer(0)
## [1] "string data missing in glbObsAll: "
## ImageId label ImageId.x.y
## 0 NA 0
## [1] "numeric data missing in glbObsAll: "
## left_eye_center_x left_eye_center_y label.fctr
## 44875 44875 44575
## [1] "numeric data w/ 0s in glbObsAll: "
## named integer(0)
## [1] "numeric data w/ Infs in glbObsAll: "
## named integer(0)
## [1] "numeric data w/ NaNs in glbObsAll: "
## named integer(0)
## [1] "string data missing in glbObsAll: "
## ImageId label ImageId.x.y
## 0 NA 0
## label step_major step_minor label_minor bgn end
## 12 manage.missing.data 4 0 0 67.072 67.749
## 13 cluster.data 5 0 0 67.750 NA
## elapsed
## 12 0.677
## 13 NA
5.0: cluster data
## label step_major step_minor label_minor bgn end
## 13 cluster.data 5 0 0 67.750 67.868
## 14 partition.data.training 6 0 0 67.868 NA
## elapsed
## 13 0.118
## 14 NA
6.0: partition data training
## [1] "partition.data.training chunk: setup: elapsed: 0.00 secs"
## [1] "partition.data.training chunk: strata_mtrx complete: elapsed: 0.65 secs"
## [1] "partition.data.training chunk: obs_freq_df complete: elapsed: 0.65 secs"
## Loading required package: sampling
##
## Attaching package: 'sampling'
## The following objects are masked from 'package:survival':
##
## cluster, strata
## The following object is masked from 'package:caret':
##
## cluster
## [1] "partition.data.training chunk: Fit/OOB partition complete: elapsed: 1.53 secs"
## label..none label.left_eye_center label.NA
## NA NA 44575
## Fit 14215 3517 NA
## OOB 14213 3515 NA
## label..none label.left_eye_center label.NA
## NA NA 1
## Fit 0.8016580 0.1983420 NA
## OOB 0.8017261 0.1982739 NA
## P.cor.cut.fctr .n.Fit .n.OOB .n.Tst .freqRatio.Fit .freqRatio.OOB
## 2 (0,0.5] 8411 8409 23379 0.47434018 0.47433439
## 3 (0.5,0.7] 6687 6686 14117 0.37711482 0.37714350
## 4 (0.7,1] 2254 2254 4689 0.12711482 0.12714350
## 1 (-1,0] 380 379 2390 0.02143018 0.02137861
## .freqRatio.Tst
## 2 0.5244868
## 3 0.3167022
## 4 0.1051935
## 1 0.0536175
## [1] "glbObsAll: "
## [1] 80035 25
## [1] "glbObsTrn: "
## [1] 35460 25
## [1] "glbObsFit: "
## [1] 17732 24
## [1] "glbObsOOB: "
## [1] 17728 24
## [1] "glbObsNew: "
## [1] 44575 24
## [1] "partition.data.training chunk: teardown: elapsed: 2.77 secs"
## label step_major step_minor label_minor bgn end
## 14 partition.data.training 6 0 0 67.868 70.693
## 15 select.features 7 0 0 70.694 NA
## elapsed
## 14 2.825
## 15 NA
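Note that the Fit/OOB class ratios above track the Train ratio (~0.80/0.20), i.e., the split is stratified on the response. A comparable stratified split can be sketched with caret::createDataPartition (illustrative only; this notebook partitions via the sampling package's strata()):
# Sketch: stratified Fit/OOB split preserving class proportions.
set.seed(123)
y <- factor(rep(c(".none", "left_eye_center"), times = c(80, 20)))
fit_ix <- caret::createDataPartition(y, p = 0.5, list = FALSE)
prop.table(table(y[fit_ix]))  # ~0.80/0.20, matching the parent ratios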
7.0: select features
## [1] "cor(P.mnkSml.2.scld, P.mnkSml.3.scld)=0.9935"
## [1] "cor(label.fctr, P.mnkSml.2.scld)=0.0871"
## [1] "cor(label.fctr, P.mnkSml.3.scld)=0.0954"
## Warning in myfind_cor_features(feats_df = glb_feats_df, obs_df =
## glbObsTrn, : Identified P.mnkSml.2.scld as highly correlated with P.mnkSml.
## 3.scld
## [1] "cor(P.mnkSml.1.scld, P.mnkSml.3.scld)=0.9692"
## [1] "cor(label.fctr, P.mnkSml.1.scld)=0.0752"
## [1] "cor(label.fctr, P.mnkSml.3.scld)=0.0954"
## Warning in myfind_cor_features(feats_df = glb_feats_df, obs_df =
## glbObsTrn, : Identified P.mnkSml.1.scld as highly correlated with P.mnkSml.
## 3.scld
## [1] "cor(P.mnkSml.1.scld.cut.fctr, P.mnkSml.2.scld.cut.fctr)=0.9607"
## [1] "cor(label.fctr, P.mnkSml.1.scld.cut.fctr)=0.0545"
## [1] "cor(label.fctr, P.mnkSml.2.scld.cut.fctr)=0.0619"
## Warning in myfind_cor_features(feats_df = glb_feats_df, obs_df =
## glbObsTrn, : Identified P.mnkSml.1.scld.cut.fctr as highly correlated with
## P.mnkSml.2.scld.cut.fctr
## [1] "cor(P.mnkSml.2.scld.cut.fctr, P.mnkSml.3.scld.cut.fctr)=0.9571"
## [1] "cor(label.fctr, P.mnkSml.2.scld.cut.fctr)=0.0619"
## [1] "cor(label.fctr, P.mnkSml.3.scld.cut.fctr)=0.0699"
## Warning in myfind_cor_features(feats_df = glb_feats_df, obs_df =
## glbObsTrn, : Identified P.mnkSml.2.scld.cut.fctr as highly correlated with
## P.mnkSml.3.scld.cut.fctr
## [1] "cor(P.mnkSml.3.scld, P.mnkSml.3.scld.cut.fctr)=0.8950"
## [1] "cor(label.fctr, P.mnkSml.3.scld)=0.0954"
## [1] "cor(label.fctr, P.mnkSml.3.scld.cut.fctr)=0.0699"
## Warning in myfind_cor_features(feats_df = glb_feats_df, obs_df =
## glbObsTrn, : Identified P.mnkSml.3.scld.cut.fctr as highly correlated with
## P.mnkSml.3.scld
## [1] "cor(P.cor, P.cor.cut.fctr)=0.8702"
## [1] "cor(label.fctr, P.cor)=0.2572"
## [1] "cor(label.fctr, P.cor.cut.fctr)=0.2690"
## Warning in myfind_cor_features(feats_df = glb_feats_df, obs_df =
## glbObsTrn, : Identified P.cor as highly correlated with P.cor.cut.fctr
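Each warning above drops one feature of a highly correlated pair, keeping the one better correlated with the response. caret's findCorrelation does analogous pruning; a toy sketch (not the myfind_cor_features implementation):
# Sketch: flag one column of each highly correlated pair for removal.
set.seed(123)
m <- matrix(rnorm(300), ncol = 3, dimnames = list(NULL, c("P.a", "P.b", "P.c")))
m[, "P.b"] <- m[, "P.c"] + rnorm(100, sd = 0.05)  # make P.b ~ P.c collinear
caret::findCorrelation(cor(m), cutoff = 0.9)      # index of the column to drop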
## cor.y exclude.as.feat cor.y.abs
## P.cor.cut.fctr 0.2689780740 0 0.2689780740
## P.cor 0.2572138070 0 0.2572138070
## P.cosSml.cut.fctr 0.1231176955 0 0.1231176955
## P.mnkSml.3 0.0954174710 1 0.0954174710
## P.mnkSml.3.scld 0.0954174710 0 0.0954174710
## P.mnkSml.2 0.0870549264 1 0.0870549264
## P.mnkSml.2.scld 0.0870549264 0 0.0870549264
## P.mnkSml.1 0.0752314908 1 0.0752314908
## P.mnkSml.1.scld 0.0752314908 0 0.0752314908
## P.cosSml 0.0712794349 0 0.0712794349
## P.mnkSml.3.scld.cut.fctr 0.0699274202 0 0.0699274202
## P.mnkSml.2.scld.cut.fctr 0.0618638506 0 0.0618638506
## P.mnkSml.1.scld.cut.fctr 0.0544794728 0 0.0544794728
## .pos 0.0030973869 1 0.0030973869
## x 0.0006481094 1 0.0006481094
## left_eye_center_x -0.0003532289 1 0.0003532289
## y -0.0003853124 1 0.0003853124
## left_eye_center_y -0.0006416037 1 0.0006416037
## .rnorm -0.0060323121 0 0.0060323121
## cor.high.X freqRatio percentUnique
## P.cor.cut.fctr <NA> 1.257758 0.01128032
## P.cor P.cor.cut.fctr 1.000000 98.16130852
## P.cosSml.cut.fctr <NA> 1.013082 0.01128032
## P.mnkSml.3 <NA> 1.333333 97.87930062
## P.mnkSml.3.scld <NA> 1.333333 97.87930062
## P.mnkSml.2 <NA> 1.333333 98.29385223
## P.mnkSml.2.scld P.mnkSml.3.scld 1.333333 98.29385223
## P.mnkSml.1 <NA> 1.000000 98.07106599
## P.mnkSml.1.scld P.mnkSml.3.scld 1.000000 98.11900733
## P.cosSml <NA> 1.000000 95.81500282
## P.mnkSml.3.scld.cut.fctr P.mnkSml.3.scld 1.125439 0.01128032
## P.mnkSml.2.scld.cut.fctr P.mnkSml.3.scld.cut.fctr 1.034889 0.01128032
## P.mnkSml.1.scld.cut.fctr P.mnkSml.2.scld.cut.fctr 1.033267 0.01128032
## .pos <NA> 1.000000 100.00000000
## x <NA> 1.011520 0.15228426
## left_eye_center_x <NA> 1.090377 0.13254371
## y <NA> 1.011628 0.15792442
## left_eye_center_y <NA> 1.028810 0.10434292
## .rnorm <NA> 1.000000 99.84771574
## zeroVar nzv is.cor.y.abs.low
## P.cor.cut.fctr FALSE FALSE FALSE
## P.cor FALSE FALSE FALSE
## P.cosSml.cut.fctr FALSE FALSE FALSE
## P.mnkSml.3 FALSE FALSE FALSE
## P.mnkSml.3.scld FALSE FALSE FALSE
## P.mnkSml.2 FALSE FALSE FALSE
## P.mnkSml.2.scld FALSE FALSE FALSE
## P.mnkSml.1 FALSE FALSE FALSE
## P.mnkSml.1.scld FALSE FALSE FALSE
## P.cosSml FALSE FALSE FALSE
## P.mnkSml.3.scld.cut.fctr FALSE FALSE FALSE
## P.mnkSml.2.scld.cut.fctr FALSE FALSE FALSE
## P.mnkSml.1.scld.cut.fctr FALSE FALSE FALSE
## .pos FALSE FALSE TRUE
## x FALSE FALSE TRUE
## left_eye_center_x FALSE FALSE TRUE
## y FALSE FALSE TRUE
## left_eye_center_y FALSE FALSE TRUE
## .rnorm FALSE FALSE FALSE
## Warning in myplot_scatter(plt_feats_df, "percentUnique", "freqRatio",
## colorcol_name = "nzv", : converting nzv to class:factor
## Warning: Removed 10 rows containing missing values (geom_point).
## Warning: Removed 10 rows containing missing values (geom_point).
## Warning: Removed 10 rows containing missing values (geom_point).
## [1] cor.y exclude.as.feat cor.y.abs cor.high.X
## [5] freqRatio percentUnique zeroVar nzv
## [9] is.cor.y.abs.low
## <0 rows> (or 0-length row.names)
## Warning in if (grepl("RFE\\.X", names(glbMdlFamilies))) {: the condition
## has length > 1 and only the first element will be used
## [1] "numeric data missing in glbObsAll: "
## left_eye_center_x left_eye_center_y label.fctr
## 44875 44875 44575
## [1] "numeric data w/ 0s in glbObsAll: "
## named integer(0)
## [1] "numeric data w/ Infs in glbObsAll: "
## named integer(0)
## [1] "numeric data w/ NaNs in glbObsAll: "
## named integer(0)
## [1] "string data missing in glbObsAll: "
## ImageId label ImageId.x.y .lcn
## 0 NA 0 44575
## [1] "glb_feats_df:"
## [1] 19 12
## id exclude.as.feat rsp_var
## label.fctr label.fctr TRUE TRUE
## id cor.y exclude.as.feat cor.y.abs cor.high.X freqRatio
## label.fctr label.fctr NA TRUE NA <NA> NA
## percentUnique zeroVar nzv is.cor.y.abs.low interaction.feat
## label.fctr NA NA NA NA NA
## shapiro.test.p.value rsp_var_raw id_var rsp_var
## label.fctr NA NA NA TRUE
## [1] "glb_feats_df vs. glbObsAll: "
## character(0)
## [1] "glbObsAll vs. glb_feats_df: "
## character(0)
## label step_major step_minor label_minor bgn end elapsed
## 15 select.features 7 0 0 70.694 79.093 8.399
## 16 fit.models 8 0 0 79.093 NA NA
8.0: fit models
fit.models_0_chunk_df <- myadd_chunk(NULL, "fit.models_0_bgn", label.minor = "setup")
## label step_major step_minor label_minor bgn end elapsed
## 1 fit.models_0_bgn 1 0 setup 79.697 NA NA
# load(paste0(glbOut$pfx, "dsk.RData"))
# Build the model-selection sort formula over the evaluation metrics:
# "max.*" metrics sort descending (prefixed "-"), the rest ascending.
get_model_sel_frmla <- function() {
model_evl_terms <- c(NULL)
# min.aic.fit might not be available
lclMdlEvlCriteria <-
glbMdlMetricsEval[glbMdlMetricsEval %in% names(glb_models_df)]
for (metric in lclMdlEvlCriteria)
model_evl_terms <- c(model_evl_terms,
ifelse(length(grep("max", metric)) > 0, "-", "+"), metric)
if (glb_is_classification && glb_is_binomial)
model_evl_terms <- c(model_evl_terms, "-", "opt.prob.threshold.OOB")
model_sel_frmla <- as.formula(paste(c("~ ", model_evl_terms), collapse = " "))
return(model_sel_frmla)
}
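A toy sketch of how such a formula drives ranking, assuming orderBy comes from the doBy package and using metric values like those reported later in this section:
# Sketch: "-" terms sort descending, so the best model lands on top.
suppressPackageStartupMessages(require(doBy))
toy_models_df <- data.frame(id = c("MFO", "Random", "glmnet"),
                            max.Accuracy.OOB = c(0.198, 0.198, 0.590),
                            opt.prob.threshold.OOB = c(0.1, 0.1, 0.2))
orderBy(~ -max.Accuracy.OOB - opt.prob.threshold.OOB, toy_models_df)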
# Rank the fitted models by the selection formula, flag cross-validation
# issues, and save a grid of bestTune plots to disk.
get_dsp_models_df <- function() {
dsp_models_cols <- c("id",
glbMdlMetricsEval[glbMdlMetricsEval %in% names(glb_models_df)],
grep("opt.", names(glb_models_df), fixed = TRUE, value = TRUE))
dsp_models_df <-
#orderBy(get_model_sel_frmla(), glb_models_df)[, c("id", glbMdlMetricsEval)]
orderBy(get_model_sel_frmla(), glb_models_df)[, dsp_models_cols]
nCvMdl <- sapply(glb_models_lst, function(mdl) nrow(mdl$results))
nParams <- sapply(glb_models_lst, function(mdl) ifelse(mdl$method == "custom", 0,
nrow(subset(modelLookup(mdl$method), parameter != "parameter"))))
# nCvMdl <- nCvMdl[names(nCvMdl) != "avNNet"]
# nParams <- nParams[names(nParams) != "avNNet"]
if (length(cvMdlProblems <- nCvMdl[nCvMdl <= nParams]) > 0) {
print("Cross Validation issues:")
warning("Cross Validation issues:")
print(cvMdlProblems)
}
pltMdls <- setdiff(names(nCvMdl), names(cvMdlProblems))
pltMdls <- setdiff(pltMdls, names(nParams[nParams == 0]))
# length(pltMdls) == 21
png(paste0(glbOut$pfx, "bestTune.png"), width = 480 * 2, height = 480 * 4)
grid.newpage()
pushViewport(viewport(layout = grid.layout(ceiling(length(pltMdls) / 2.0), 2)))
pltIx <- 1
for (mdlId in pltMdls) {
print(ggplot(glb_models_lst[[mdlId]], highlight = TRUE) + labs(title = mdlId),
vp = viewport(layout.pos.row = ceiling(pltIx / 2.0),
layout.pos.col = ((pltIx - 1) %% 2) + 1))
pltIx <- pltIx + 1
}
dev.off()
if (!all(row.names(dsp_models_df) == dsp_models_df$id))
row.names(dsp_models_df) <- dsp_models_df$id
return(dsp_models_df)
}
#get_dsp_models_df()
if (glb_is_classification && glb_is_binomial &&
(length(unique(glbObsFit[, glb_rsp_var])) < 2))
stop("glbObsFit$", glb_rsp_var, ": contains less than 2 unique values: ",
paste0(unique(glbObsFit[, glb_rsp_var]), collapse=", "))
max_cor_y_x_vars <- orderBy(~ -cor.y.abs,
subset(glb_feats_df, (exclude.as.feat == 0) & !nzv & !is.cor.y.abs.low &
is.na(cor.high.X)))[1:2, "id"]
max_cor_y_x_vars <- max_cor_y_x_vars[!is.na(max_cor_y_x_vars)]
if (length(max_cor_y_x_vars) < 2)
max_cor_y_x_vars <- union(max_cor_y_x_vars, ".pos")
if (!is.null(glb_Baseline_mdl_var)) {
if ((max_cor_y_x_vars[1] != glb_Baseline_mdl_var) &
(glb_feats_df[glb_feats_df$id == max_cor_y_x_vars[1], "cor.y.abs"] >
glb_feats_df[glb_feats_df$id == glb_Baseline_mdl_var, "cor.y.abs"]))
stop(max_cor_y_x_vars[1], " has a higher correlation with ", glb_rsp_var,
" than the Baseline var: ", glb_Baseline_mdl_var)
}
glb_model_type <- ifelse(glb_is_regression, "regression", "classification")
# Model specs
c("id.prefix", "method", "type",
# trainControl params
"preProc.method", "cv.n.folds", "cv.n.repeats", "summary.fn",
# train params
"metric", "metric.maximize", "tune.df")
## [1] "id.prefix" "method" "type"
## [4] "preProc.method" "cv.n.folds" "cv.n.repeats"
## [7] "summary.fn" "metric" "metric.maximize"
## [10] "tune.df"
# Baseline
if (!is.null(glb_Baseline_mdl_var)) {
fit.models_0_chunk_df <- myadd_chunk(fit.models_0_chunk_df,
paste0("fit.models_0_", "Baseline"), major.inc = FALSE,
label.minor = "mybaseln_classfr")
ret_lst <- myfit_mdl(mdl_id="Baseline",
model_method="mybaseln_classfr",
indep_vars_vctr=glb_Baseline_mdl_var,
rsp_var=glb_rsp_var,
fit_df=glbObsFit, OOB_df=glbObsOOB)
}
# Most Frequent Outcome "MFO" model: mean(y) for regression
# Not using caret's nullModel since model stats are not available
# Cannot use rpart for multinomial classification since it predicts non-MFO
if (glb_is_classification) {
fit.models_0_chunk_df <- myadd_chunk(fit.models_0_chunk_df,
paste0("fit.models_0_", "MFO"), major.inc = FALSE,
label.minor = "myMFO_classfr")
ret_lst <- myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst = list(
id.prefix = "MFO", type = glb_model_type, trainControl.method = "none",
train.method = ifelse(glb_is_regression, "lm", "myMFO_classfr"))),
indep_vars = ".rnorm", rsp_var = glb_rsp_var,
fit_df = glbObsFit, OOB_df = glbObsOOB)
# "random" model - only for classification;
# none needed for regression since it is same as MFO
fit.models_0_chunk_df <- myadd_chunk(fit.models_0_chunk_df,
paste0("fit.models_0_", "Random"), major.inc = FALSE,
label.minor = "myrandom_classfr")
#stop(here"); glb2Sav(); all.equal(glb_models_df, sav_models_df)
ret_lst <- myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst = list(
id.prefix = "Random", type = glb_model_type, trainControl.method = "none",
train.method = "myrandom_classfr")),
indep_vars = ".rnorm", rsp_var = glb_rsp_var,
fit_df = glbObsFit, OOB_df = glbObsOOB)
}
## label step_major step_minor label_minor bgn end
## 1 fit.models_0_bgn 1 0 setup 79.697 79.729
## 2 fit.models_0_MFO 1 1 myMFO_classfr 79.730 NA
## elapsed
## 1 0.033
## 2 NA
## [1] "myfit_mdl: enter: 0.000000 secs"
## [1] "fitting model: MFO###myMFO_classfr"
## [1] " indep_vars: .rnorm"
## [1] "myfit_mdl: setup complete: 0.457000 secs"
## Fitting parameter = none on full training set
## [1] "in MFO.Classifier$fit"
## [1] "unique.vals:"
## [1] .none left_eye_center
## Levels: .none left_eye_center
## [1] "unique.prob:"
## y
## .none left_eye_center
## 0.801658 0.198342
## [1] "MFO.val:"
## [1] ".none"
## [1] "myfit_mdl: train complete: 0.936000 secs"
## Length Class Mode
## unique.vals 2 factor numeric
## unique.prob 2 -none- numeric
## MFO.val 1 -none- character
## x.names 1 -none- character
## xNames 1 -none- character
## problemType 1 -none- character
## tuneValue 1 data.frame list
## obsLevels 2 -none- character
## [1] "myfit_mdl: train diagnostics complete: 0.938000 secs"
## Loading required namespace: pROC
## [1] "entr MFO.Classifier$predict"
## [1] "exit MFO.Classifier$predict"
## Loading required package: ROCR
## Loading required package: gplots
##
## Attaching package: 'gplots'
## The following object is masked from 'package:stats':
##
## lowess
## [1] "in MFO.Classifier$prob"
## .none left_eye_center
## 1 0.801658 0.198342
## 2 0.801658 0.198342
## 3 0.801658 0.198342
## 4 0.801658 0.198342
## 5 0.801658 0.198342
## 6 0.801658 0.198342
## Prediction
## Reference .none left_eye_center
## .none 0 14215
## left_eye_center 0 3517
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.1983420 0.0000000 0.1924945 0.2042885 0.8016580
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## [1] "entr MFO.Classifier$predict"
## [1] "exit MFO.Classifier$predict"
## [1] "in MFO.Classifier$prob"
## .none left_eye_center
## 1 0.801658 0.198342
## 2 0.801658 0.198342
## 3 0.801658 0.198342
## 4 0.801658 0.198342
## 5 0.801658 0.198342
## 6 0.801658 0.198342
## Prediction
## Reference .none left_eye_center
## .none 0 14213
## left_eye_center 0 3515
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.1982739 0.0000000 0.1924266 0.2042204 0.8017261
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## [1] "myfit_mdl: predict complete: 5.609000 secs"
## id feats max.nTuningRuns min.elapsedtime.everything
## 1 MFO###myMFO_classfr .rnorm 0 0.455
## min.elapsedtime.final max.AUCpROC.fit max.Sens.fit max.Spec.fit
## 1 0.004 0.5 1 0
## max.AUCROCR.fit opt.prob.threshold.fit max.f.score.fit max.Accuracy.fit
## 1 0.5 0.1 0.3310273 0.198342
## max.AccuracyLower.fit max.AccuracyUpper.fit max.Kappa.fit
## 1 0.1924945 0.2042885 0
## max.AUCpROC.OOB max.Sens.OOB max.Spec.OOB max.AUCROCR.OOB
## 1 0.5 1 0 0.5
## opt.prob.threshold.OOB max.f.score.OOB max.Accuracy.OOB
## 1 0.1 0.3309325 0.1982739
## max.AccuracyLower.OOB max.AccuracyUpper.OOB max.Kappa.OOB
## 1 0.1924266 0.2042204 0
## [1] "myfit_mdl: exit: 5.618000 secs"
## label step_major step_minor label_minor bgn end
## 2 fit.models_0_MFO 1 1 myMFO_classfr 79.730 85.353
## 3 fit.models_0_Random 1 2 myrandom_classfr 85.354 NA
## elapsed
## 2 5.623
## 3 NA
## [1] "myfit_mdl: enter: 0.000000 secs"
## [1] "fitting model: Random###myrandom_classfr"
## [1] " indep_vars: .rnorm"
## [1] "myfit_mdl: setup complete: 0.427000 secs"
## Fitting parameter = none on full training set
## [1] "myfit_mdl: train complete: 0.751000 secs"
## Length Class Mode
## unique.vals 2 factor numeric
## unique.prob 2 table numeric
## xNames 1 -none- character
## problemType 1 -none- character
## tuneValue 1 data.frame list
## obsLevels 2 -none- character
## [1] "myfit_mdl: train diagnostics complete: 0.753000 secs"
## [1] "in Random.Classifier$prob"
## Prediction
## Reference .none left_eye_center
## .none 0 14215
## left_eye_center 0 3517
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.1983420 0.0000000 0.1924945 0.2042885 0.8016580
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## [1] "in Random.Classifier$prob"
## Prediction
## Reference .none left_eye_center
## .none 0 14213
## left_eye_center 0 3515
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.1982739 0.0000000 0.1924266 0.2042204 0.8017261
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## [1] "myfit_mdl: predict complete: 17.910000 secs"
## id feats max.nTuningRuns
## 1 Random###myrandom_classfr .rnorm 0
## min.elapsedtime.everything min.elapsedtime.final max.AUCpROC.fit
## 1 0.305 0.004 0.4988761
## max.Sens.fit max.Spec.fit max.AUCROCR.fit opt.prob.threshold.fit
## 1 0.8001407 0.1976116 0.4977388 0.1
## max.f.score.fit max.Accuracy.fit max.AccuracyLower.fit
## 1 0.3310273 0.198342 0.1924945
## max.AccuracyUpper.fit max.Kappa.fit max.AUCpROC.OOB max.Sens.OOB
## 1 0.2042885 0 0.5027819 0.8024344
## max.Spec.OOB max.AUCROCR.OOB opt.prob.threshold.OOB max.f.score.OOB
## 1 0.2031294 0.5012493 0.1 0.3309325
## max.Accuracy.OOB max.AccuracyLower.OOB max.AccuracyUpper.OOB
## 1 0.1982739 0.1924266 0.2042204
## max.Kappa.OOB
## 1 0
## [1] "myfit_mdl: exit: 17.922000 secs"
# Max.cor.Y
# Check impact of cv
# rpart is not a good candidate since caret does not optimize cp (rpart's only tuning parameter) well
fit.models_0_chunk_df <- myadd_chunk(fit.models_0_chunk_df,
paste0("fit.models_0_", "Max.cor.Y.rcv.*X*"), major.inc = FALSE,
label.minor = "glmnet")
## label step_major step_minor label_minor
## 3 fit.models_0_Random 1 2 myrandom_classfr
## 4 fit.models_0_Max.cor.Y.rcv.*X* 1 3 glmnet
## bgn end elapsed
## 3 85.354 103.287 17.934
## 4 103.288 NA NA
ret_lst <- myfit_mdl(mdl_specs_lst=myinit_mdl_specs_lst(mdl_specs_lst=list(
id.prefix="Max.cor.Y.rcv.1X1", type=glb_model_type, trainControl.method="none",
train.method="glmnet")),
indep_vars=max_cor_y_x_vars, rsp_var=glb_rsp_var,
fit_df=glbObsFit, OOB_df=glbObsOOB)
## [1] "myfit_mdl: enter: 0.000000 secs"
## [1] "fitting model: Max.cor.Y.rcv.1X1###glmnet"
## [1] " indep_vars: P.cor.cut.fctr,P.cosSml.cut.fctr"
## [1] "myfit_mdl: setup complete: 0.680000 secs"
## Loading required package: glmnet
## Loading required package: Matrix
## Loaded glmnet 2.0-2
## Fitting alpha = 0.1, lambda = 0.00178 on full training set
## [1] "myfit_mdl: train complete: 1.998000 secs"
## Length Class Mode
## a0 77 -none- numeric
## beta 462 dgCMatrix S4
## df 77 -none- numeric
## dim 2 -none- numeric
## lambda 77 -none- numeric
## dev.ratio 77 -none- numeric
## nulldev 1 -none- numeric
## npasses 1 -none- numeric
## jerr 1 -none- numeric
## offset 1 -none- logical
## classnames 2 -none- character
## call 5 -none- call
## nobs 1 -none- numeric
## lambdaOpt 1 -none- numeric
## xNames 6 -none- character
## problemType 1 -none- character
## tuneValue 2 data.frame list
## obsLevels 2 -none- character
## [1] "min lambda > lambdaOpt:"
## (Intercept) P.cor.cut.fctr(0,0.5]
## -1.988988801 -0.158907025
## P.cor.cut.fctr(0.5,0.7] P.cor.cut.fctr(0.7,1]
## 0.765853419 1.584075106
## P.cosSml.cut.fctr(0.95,0.97] P.cosSml.cut.fctr(0.97,0.98]
## -0.073582905 0.003537194
## P.cosSml.cut.fctr(0.98,1]
## 0.243239337
## [1] "max lambda < lambdaOpt:"
## (Intercept) P.cor.cut.fctr(0,0.5]
## -2.007597573 -0.140505829
## P.cor.cut.fctr(0.5,0.7] P.cor.cut.fctr(0.7,1]
## 0.785185395 1.603959899
## P.cosSml.cut.fctr(0.95,0.97] P.cosSml.cut.fctr(0.97,0.98]
## -0.073835515 0.003346747
## P.cosSml.cut.fctr(0.98,1]
## 0.242765545
## [1] "myfit_mdl: train diagnostics complete: 2.110000 secs"
## Prediction
## Reference .none left_eye_center
## .none 7868 6347
## left_eye_center 923 2594
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.5900068 0.1841772 0.5827252 0.5972588 0.8016580
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## Prediction
## Reference .none left_eye_center
## .none 7867 6346
## left_eye_center 921 2594
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.5900835 0.1843870 0.5828013 0.5973362 0.8017261
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## [1] "myfit_mdl: predict complete: 6.764000 secs"
## id feats
## 1 Max.cor.Y.rcv.1X1###glmnet P.cor.cut.fctr,P.cosSml.cut.fctr
## max.nTuningRuns min.elapsedtime.everything min.elapsedtime.final
## 1 0 1.294 0.415
## max.AUCpROC.fit max.Sens.fit max.Spec.fit max.AUCROCR.fit
## 1 0.5 1 0 0.6853218
## opt.prob.threshold.fit max.f.score.fit max.Accuracy.fit
## 1 0.2 0.4164392 0.5900068
## max.AccuracyLower.fit max.AccuracyUpper.fit max.Kappa.fit
## 1 0.5827252 0.5972588 0.1841772
## max.AUCpROC.OOB max.Sens.OOB max.Spec.OOB max.AUCROCR.OOB
## 1 0.5 1 0 0.680979
## opt.prob.threshold.OOB max.f.score.OOB max.Accuracy.OOB
## 1 0.2 0.4165395 0.5900835
## max.AccuracyLower.OOB max.AccuracyUpper.OOB max.Kappa.OOB
## 1 0.5828013 0.5973362 0.184387
## [1] "myfit_mdl: exit: 6.776000 secs"
if (glbMdlCheckRcv) {
# rcv_n_folds == 1 & rcv_n_repeats > 1 crashes
for (rcv_n_folds in seq(3, glb_rcv_n_folds + 2, 2))
for (rcv_n_repeats in seq(1, glb_rcv_n_repeats + 2, 2)) {
# Experiment specific code to avoid caret crash
# lcl_tune_models_df <- rbind(data.frame()
# ,data.frame(method = "glmnet", parameter = "alpha",
# vals = "0.100 0.325 0.550 0.775 1.000")
# ,data.frame(method = "glmnet", parameter = "lambda",
# vals = "9.342e-02")
# )
ret_lst <- myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst =
list(
id.prefix = paste0("Max.cor.Y.rcv.", rcv_n_folds, "X", rcv_n_repeats),
type = glb_model_type,
# tune.df = lcl_tune_models_df,
trainControl.method = "repeatedcv",
trainControl.number = rcv_n_folds,
trainControl.repeats = rcv_n_repeats,
trainControl.classProbs = glb_is_classification,
trainControl.summaryFunction = glbMdlMetricSummaryFn,
train.method = "glmnet", train.metric = glbMdlMetricSummary,
train.maximize = glbMdlMetricMaximize)),
indep_vars = max_cor_y_x_vars, rsp_var = glb_rsp_var,
fit_df = glbObsFit, OOB_df = glbObsOOB)
}
# Add parallel coordinates graph of glb_models_df[, glbMdlMetricsEval] to evaluate cv parameters
tmp_models_cols <- c("id", "max.nTuningRuns",
glbMdlMetricsEval[glbMdlMetricsEval %in% names(glb_models_df)],
grep("opt.", names(glb_models_df), fixed = TRUE, value = TRUE))
print(myplot_parcoord(obs_df = subset(glb_models_df,
grepl("Max.cor.Y.rcv.", id, fixed = TRUE),
select = -feats)[, tmp_models_cols],
id_var = "id"))
}
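The trainControl.* specs above presumably map onto caret::trainControl; a minimal direct equivalent (the 3x3 fold/repeat values match the Fold1-3.Rep1-3 traces below, but are illustrative, not the notebook's globals):
# Sketch: repeated-CV control mirroring the trainControl.* spec names.
ctrl <- caret::trainControl(method = "repeatedcv", number = 3, repeats = 3,
                            classProbs = TRUE,
                            summaryFunction = caret::twoClassSummary)
# mdl <- caret::train(label.fctr ~ ., method = "glmnet", trControl = ctrl,
#                     metric = "ROC", maximize = TRUE,
#                     data = glbObsFit[, c("label.fctr", max_cor_y_x_vars)])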
# Useful for stacking decisions
# fit.models_0_chunk_df <- myadd_chunk(fit.models_0_chunk_df,
# paste0("fit.models_0_", "Max.cor.Y[rcv.1X1.cp.0|]"), major.inc = FALSE,
# label.minor = "rpart")
#
# ret_lst <- myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst = list(
# id.prefix = "Max.cor.Y.rcv.1X1.cp.0", type = glb_model_type, trainControl.method = "none",
# train.method = "rpart",
# tune.df=data.frame(method="rpart", parameter="cp", min=0.0, max=0.0, by=0.1))),
# indep_vars=max_cor_y_x_vars, rsp_var=glb_rsp_var,
# fit_df=glbObsFit, OOB_df=glbObsOOB)
#stop(here"); glb2Sav(); all.equal(glb_models_df, sav_models_df)
# if (glb_is_regression || glb_is_binomial) # For multinomials this model will be run next by default
ret_lst <- myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst = list(
id.prefix = "Max.cor.Y",
type = glb_model_type, trainControl.method = "repeatedcv",
trainControl.number = glb_rcv_n_folds,
trainControl.repeats = glb_rcv_n_repeats,
trainControl.classProbs = glb_is_classification,
trainControl.summaryFunction = glbMdlMetricSummaryFn,
trainControl.allowParallel = glbMdlAllowParallel,
train.metric = glbMdlMetricSummary,
train.maximize = glbMdlMetricMaximize,
train.method = "rpart")),
indep_vars = max_cor_y_x_vars, rsp_var = glb_rsp_var,
fit_df = glbObsFit, OOB_df = glbObsOOB)
## [1] "myfit_mdl: enter: 0.000000 secs"
## [1] "fitting model: Max.cor.Y##rcv#rpart"
## [1] " indep_vars: P.cor.cut.fctr,P.cosSml.cut.fctr"
## [1] "myfit_mdl: setup complete: 0.676000 secs"
## Loading required package: rpart
## + Fold1.Rep1: cp=0
## - Fold1.Rep1: cp=0
## + Fold2.Rep1: cp=0
## - Fold2.Rep1: cp=0
## + Fold3.Rep1: cp=0
## - Fold3.Rep1: cp=0
## + Fold1.Rep2: cp=0
## - Fold1.Rep2: cp=0
## + Fold2.Rep2: cp=0
## - Fold2.Rep2: cp=0
## + Fold3.Rep2: cp=0
## - Fold3.Rep2: cp=0
## + Fold1.Rep3: cp=0
## - Fold1.Rep3: cp=0
## + Fold2.Rep3: cp=0
## - Fold2.Rep3: cp=0
## + Fold3.Rep3: cp=0
## - Fold3.Rep3: cp=0
## Aggregating results
## Fitting final model on full training set
## [1] "myfit_mdl: train complete: 3.898000 secs"
## Loading required package: rpart.plot
## Call:
## rpart(formula = .outcome ~ ., control = list(minsplit = 20, minbucket = 7,
## cp = 0, maxcompete = 4, maxsurrogate = 5, usesurrogate = 2,
## surrogatestyle = 0, maxdepth = 30, xval = 0))
## n= 17732
##
## CP nsplit rel error
## 1 0 0 1
##
## Node number 1: 17732 observations
## predicted class=.none expected loss=0.198342 P(node) =1
## class counts: 14215 3517
## probabilities: 0.802 0.198
##
## n= 17732
##
## node), split, n, loss, yval, (yprob)
## * denotes terminal node
##
## 1) root 17732 3517 .none (0.8016580 0.1983420) *
## [1] "myfit_mdl: train diagnostics complete: 4.081000 secs"
## Prediction
## Reference .none left_eye_center
## .none 0 14215
## left_eye_center 0 3517
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.1983420 0.0000000 0.1924945 0.2042885 0.8016580
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## Prediction
## Reference .none left_eye_center
## .none 0 14213
## left_eye_center 0 3515
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.1982739 0.0000000 0.1924266 0.2042204 0.8017261
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## [1] "myfit_mdl: predict complete: 8.463000 secs"
## id feats max.nTuningRuns
## 1 Max.cor.Y##rcv#rpart P.cor.cut.fctr,P.cosSml.cut.fctr 1
## min.elapsedtime.everything min.elapsedtime.final max.AUCpROC.fit
## 1 3.201 0.051 0.5
## max.Sens.fit max.Spec.fit max.AUCROCR.fit opt.prob.threshold.fit
## 1 1 0 0.5 0.1
## max.f.score.fit max.Accuracy.fit max.AccuracyLower.fit
## 1 0.3310273 0.801658 0.1924945
## max.AccuracyUpper.fit max.Kappa.fit max.AUCpROC.OOB max.Sens.OOB
## 1 0.2042885 0 0.5 1
## max.Spec.OOB max.AUCROCR.OOB opt.prob.threshold.OOB max.f.score.OOB
## 1 0 0.5 0.1 0.3309325
## max.Accuracy.OOB max.AccuracyLower.OOB max.AccuracyUpper.OOB
## 1 0.1982739 0.1924266 0.2042204
## max.Kappa.OOB max.AccuracySD.fit max.KappaSD.fit
## 1 0 6.660446e-05 0
## [1] "myfit_mdl: exit: 8.477000 secs"
if ((length(glbFeatsDateTime) > 0) &&
(sum(grepl(paste(names(glbFeatsDateTime), "\\.day\\.minutes\\.poly\\.", sep = ""),
names(glbObsAll))) > 0)) {
fit.models_0_chunk_df <- myadd_chunk(fit.models_0_chunk_df,
paste0("fit.models_0_", "Max.cor.Y.Time.Poly"), major.inc = FALSE,
label.minor = "glmnet")
indepVars <- c(max_cor_y_x_vars,
grep(paste(names(glbFeatsDateTime), "\\.day\\.minutes\\.poly\\.", sep = ""),
names(glbObsAll), value = TRUE))
indepVars <- myadjust_interaction_feats(indepVars)
ret_lst <- myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst = list(
id.prefix = "Max.cor.Y.Time.Poly",
type = glb_model_type, trainControl.method = "repeatedcv",
trainControl.number = glb_rcv_n_folds, trainControl.repeats = glb_rcv_n_repeats,
trainControl.classProbs = glb_is_classification,
trainControl.summaryFunction = glbMdlMetricSummaryFn,
train.metric = glbMdlMetricSummary,
train.maximize = glbMdlMetricMaximize,
train.method = "glmnet")),
indep_vars = indepVars,
rsp_var = glb_rsp_var,
fit_df = glbObsFit, OOB_df = glbObsOOB)
}
if ((length(glbFeatsDateTime) > 0) &&
(sum(grepl(paste(names(glbFeatsDateTime), "\\.last[[:digit:]]", sep = ""),
names(glbObsAll))) > 0)) {
fit.models_0_chunk_df <- myadd_chunk(fit.models_0_chunk_df,
paste0("fit.models_0_", "Max.cor.Y.Time.Lag"), major.inc = FALSE,
label.minor = "glmnet")
indepVars <- c(max_cor_y_x_vars,
grep(paste(names(glbFeatsDateTime), "\\.last[[:digit:]]", sep = ""),
names(glbObsAll), value = TRUE))
indepVars <- myadjust_interaction_feats(indepVars)
ret_lst <- myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst = list(
id.prefix = "Max.cor.Y.Time.Lag",
type = glb_model_type,
tune.df = glbMdlTuneParams,
trainControl.method = "repeatedcv",
trainControl.number = glb_rcv_n_folds, trainControl.repeats = glb_rcv_n_repeats,
trainControl.classProbs = glb_is_classification,
trainControl.summaryFunction = glbMdlMetricSummaryFn,
train.metric = glbMdlMetricSummary,
train.maximize = glbMdlMetricMaximize,
train.method = "glmnet")),
indep_vars = indepVars,
rsp_var = glb_rsp_var,
fit_df = glbObsFit, OOB_df = glbObsOOB)
}
if (length(glbFeatsText) > 0) {
fit.models_0_chunk_df <- myadd_chunk(fit.models_0_chunk_df,
paste0("fit.models_0_", "Txt.*"), major.inc = FALSE,
label.minor = "glmnet")
indepVars <- c(max_cor_y_x_vars)
for (txtFeat in names(glbFeatsText))
indepVars <- union(indepVars,
grep(paste(str_to_upper(substr(txtFeat, 1, 1)), "\\.(?!([T|P]\\.))", sep = ""),
names(glbObsAll), perl = TRUE, value = TRUE))
indepVars <- myadjust_interaction_feats(indepVars)
ret_lst <- myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst = list(
id.prefix = "Max.cor.Y.Text.nonTP",
type = glb_model_type,
tune.df = glbMdlTuneParams,
trainControl.method = "repeatedcv",
trainControl.number = glb_rcv_n_folds, trainControl.repeats = glb_rcv_n_repeats,
trainControl.classProbs = glb_is_classification,
trainControl.summaryFunction = glbMdlMetricSummaryFn,
trainControl.allowParallel = glbMdlAllowParallel,
train.metric = glbMdlMetricSummary,
train.maximize = glbMdlMetricMaximize,
train.method = "glmnet")),
indep_vars = indepVars,
rsp_var = glb_rsp_var,
fit_df = glbObsFit, OOB_df = glbObsOOB)
indepVars <- c(max_cor_y_x_vars)
for (txtFeat in names(glbFeatsText))
indepVars <- union(indepVars,
grep(paste(str_to_upper(substr(txtFeat, 1, 1)), "\\.T\\.", sep = ""),
names(glbObsAll), perl = TRUE, value = TRUE))
indepVars <- myadjust_interaction_feats(indepVars)
ret_lst <- myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst = list(
id.prefix = "Max.cor.Y.Text.onlyT",
type = glb_model_type,
tune.df = glbMdlTuneParams,
trainControl.method = "repeatedcv",
trainControl.number = glb_rcv_n_folds, trainControl.repeats = glb_rcv_n_repeats,
trainControl.classProbs = glb_is_classification,
trainControl.summaryFunction = glbMdlMetricSummaryFn,
train.metric = glbMdlMetricSummary,
train.maximize = glbMdlMetricMaximize,
train.method = "glmnet")),
indep_vars = indepVars,
rsp_var = glb_rsp_var,
fit_df = glbObsFit, OOB_df = glbObsOOB)
indepVars <- c(max_cor_y_x_vars)
for (txtFeat in names(glbFeatsText))
indepVars <- union(indepVars,
grep(paste(str_to_upper(substr(txtFeat, 1, 1)), "\\.P\\.", sep = ""),
names(glbObsAll), perl = TRUE, value = TRUE))
indepVars <- myadjust_interaction_feats(indepVars)
ret_lst <- myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst = list(
id.prefix = "Max.cor.Y.Text.onlyP",
type = glb_model_type,
tune.df = glbMdlTuneParams,
trainControl.method = "repeatedcv",
trainControl.number = glb_rcv_n_folds, trainControl.repeats = glb_rcv_n_repeats,
trainControl.classProbs = glb_is_classification,
trainControl.summaryFunction = glbMdlMetricSummaryFn,
trainControl.allowParallel = glbMdlAllowParallel,
train.metric = glbMdlMetricSummary,
train.maximize = glbMdlMetricMaximize,
train.method = "glmnet")),
indep_vars = indepVars,
rsp_var = glb_rsp_var,
fit_df = glbObsFit, OOB_df = glbObsOOB)
}
# Interactions.High.cor.Y
if (length(int_feats <- setdiff(setdiff(unique(glb_feats_df$cor.high.X), NA),
subset(glb_feats_df, nzv)$id)) > 0) {
fit.models_0_chunk_df <- myadd_chunk(fit.models_0_chunk_df,
paste0("fit.models_0_", "Interact.High.cor.Y"), major.inc = FALSE,
label.minor = "glmnet")
ret_lst <- myfit_mdl(mdl_specs_lst=myinit_mdl_specs_lst(mdl_specs_lst=list(
id.prefix="Interact.High.cor.Y",
type=glb_model_type, trainControl.method="repeatedcv",
trainControl.number=glb_rcv_n_folds, trainControl.repeats=glb_rcv_n_repeats,
trainControl.classProbs = glb_is_classification,
trainControl.summaryFunction = glbMdlMetricSummaryFn,
trainControl.allowParallel = glbMdlAllowParallel,
train.metric = glbMdlMetricSummary,
train.maximize = glbMdlMetricMaximize,
train.method="glmnet")),
indep_vars=c(max_cor_y_x_vars, paste(max_cor_y_x_vars[1], int_feats, sep=":")),
rsp_var=glb_rsp_var,
fit_df=glbObsFit, OOB_df=glbObsOOB)
}
## label step_major step_minor label_minor
## 4 fit.models_0_Max.cor.Y.rcv.*X* 1 3 glmnet
## 5 fit.models_0_Interact.High.cor.Y 1 4 glmnet
## bgn end elapsed
## 4 103.288 118.58 15.292
## 5 118.580 NA NA
## [1] "myfit_mdl: enter: 0.000000 secs"
## [1] "fitting model: Interact.High.cor.Y##rcv#glmnet"
## [1] " indep_vars: P.cor.cut.fctr,P.cosSml.cut.fctr,P.cor.cut.fctr:P.cor.cut.fctr,P.cor.cut.fctr:P.mnkSml.3.scld,P.cor.cut.fctr:P.mnkSml.3.scld.cut.fctr,P.cor.cut.fctr:P.mnkSml.2.scld.cut.fctr"
## [1] "myfit_mdl: setup complete: 0.703000 secs"
## + Fold1.Rep1: alpha=0.100, lambda=0.03844
## - Fold1.Rep1: alpha=0.100, lambda=0.03844
## + Fold1.Rep1: alpha=0.325, lambda=0.03844
## - Fold1.Rep1: alpha=0.325, lambda=0.03844
## + Fold1.Rep1: alpha=0.550, lambda=0.03844
## - Fold1.Rep1: alpha=0.550, lambda=0.03844
## + Fold1.Rep1: alpha=0.775, lambda=0.03844
## - Fold1.Rep1: alpha=0.775, lambda=0.03844
## + Fold1.Rep1: alpha=1.000, lambda=0.03844
## - Fold1.Rep1: alpha=1.000, lambda=0.03844
## + Fold2.Rep1: alpha=0.100, lambda=0.03844
## - Fold2.Rep1: alpha=0.100, lambda=0.03844
## + Fold2.Rep1: alpha=0.325, lambda=0.03844
## - Fold2.Rep1: alpha=0.325, lambda=0.03844
## + Fold2.Rep1: alpha=0.550, lambda=0.03844
## - Fold2.Rep1: alpha=0.550, lambda=0.03844
## + Fold2.Rep1: alpha=0.775, lambda=0.03844
## - Fold2.Rep1: alpha=0.775, lambda=0.03844
## + Fold2.Rep1: alpha=1.000, lambda=0.03844
## - Fold2.Rep1: alpha=1.000, lambda=0.03844
## + Fold3.Rep1: alpha=0.100, lambda=0.03844
## - Fold3.Rep1: alpha=0.100, lambda=0.03844
## + Fold3.Rep1: alpha=0.325, lambda=0.03844
## - Fold3.Rep1: alpha=0.325, lambda=0.03844
## + Fold3.Rep1: alpha=0.550, lambda=0.03844
## - Fold3.Rep1: alpha=0.550, lambda=0.03844
## + Fold3.Rep1: alpha=0.775, lambda=0.03844
## - Fold3.Rep1: alpha=0.775, lambda=0.03844
## + Fold3.Rep1: alpha=1.000, lambda=0.03844
## - Fold3.Rep1: alpha=1.000, lambda=0.03844
## + Fold1.Rep2: alpha=0.100, lambda=0.03844
## - Fold1.Rep2: alpha=0.100, lambda=0.03844
## + Fold1.Rep2: alpha=0.325, lambda=0.03844
## - Fold1.Rep2: alpha=0.325, lambda=0.03844
## + Fold1.Rep2: alpha=0.550, lambda=0.03844
## - Fold1.Rep2: alpha=0.550, lambda=0.03844
## + Fold1.Rep2: alpha=0.775, lambda=0.03844
## - Fold1.Rep2: alpha=0.775, lambda=0.03844
## + Fold1.Rep2: alpha=1.000, lambda=0.03844
## - Fold1.Rep2: alpha=1.000, lambda=0.03844
## + Fold2.Rep2: alpha=0.100, lambda=0.03844
## - Fold2.Rep2: alpha=0.100, lambda=0.03844
## + Fold2.Rep2: alpha=0.325, lambda=0.03844
## - Fold2.Rep2: alpha=0.325, lambda=0.03844
## + Fold2.Rep2: alpha=0.550, lambda=0.03844
## - Fold2.Rep2: alpha=0.550, lambda=0.03844
## + Fold2.Rep2: alpha=0.775, lambda=0.03844
## - Fold2.Rep2: alpha=0.775, lambda=0.03844
## + Fold2.Rep2: alpha=1.000, lambda=0.03844
## - Fold2.Rep2: alpha=1.000, lambda=0.03844
## + Fold3.Rep2: alpha=0.100, lambda=0.03844
## - Fold3.Rep2: alpha=0.100, lambda=0.03844
## + Fold3.Rep2: alpha=0.325, lambda=0.03844
## - Fold3.Rep2: alpha=0.325, lambda=0.03844
## + Fold3.Rep2: alpha=0.550, lambda=0.03844
## - Fold3.Rep2: alpha=0.550, lambda=0.03844
## + Fold3.Rep2: alpha=0.775, lambda=0.03844
## - Fold3.Rep2: alpha=0.775, lambda=0.03844
## + Fold3.Rep2: alpha=1.000, lambda=0.03844
## - Fold3.Rep2: alpha=1.000, lambda=0.03844
## + Fold1.Rep3: alpha=0.100, lambda=0.03844
## - Fold1.Rep3: alpha=0.100, lambda=0.03844
## + Fold1.Rep3: alpha=0.325, lambda=0.03844
## - Fold1.Rep3: alpha=0.325, lambda=0.03844
## + Fold1.Rep3: alpha=0.550, lambda=0.03844
## - Fold1.Rep3: alpha=0.550, lambda=0.03844
## + Fold1.Rep3: alpha=0.775, lambda=0.03844
## - Fold1.Rep3: alpha=0.775, lambda=0.03844
## + Fold1.Rep3: alpha=1.000, lambda=0.03844
## - Fold1.Rep3: alpha=1.000, lambda=0.03844
## + Fold2.Rep3: alpha=0.100, lambda=0.03844
## - Fold2.Rep3: alpha=0.100, lambda=0.03844
## + Fold2.Rep3: alpha=0.325, lambda=0.03844
## - Fold2.Rep3: alpha=0.325, lambda=0.03844
## + Fold2.Rep3: alpha=0.550, lambda=0.03844
## - Fold2.Rep3: alpha=0.550, lambda=0.03844
## + Fold2.Rep3: alpha=0.775, lambda=0.03844
## - Fold2.Rep3: alpha=0.775, lambda=0.03844
## + Fold2.Rep3: alpha=1.000, lambda=0.03844
## - Fold2.Rep3: alpha=1.000, lambda=0.03844
## + Fold3.Rep3: alpha=0.100, lambda=0.03844
## - Fold3.Rep3: alpha=0.100, lambda=0.03844
## + Fold3.Rep3: alpha=0.325, lambda=0.03844
## - Fold3.Rep3: alpha=0.325, lambda=0.03844
## + Fold3.Rep3: alpha=0.550, lambda=0.03844
## - Fold3.Rep3: alpha=0.550, lambda=0.03844
## + Fold3.Rep3: alpha=0.775, lambda=0.03844
## - Fold3.Rep3: alpha=0.775, lambda=0.03844
## + Fold3.Rep3: alpha=1.000, lambda=0.03844
## - Fold3.Rep3: alpha=1.000, lambda=0.03844
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.00178 on full training set
## [1] "myfit_mdl: train complete: 50.775000 secs"
## Warning in myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst
## = list(id.prefix = "Interact.High.cor.Y", : model's bestTune found at an
## extreme of tuneGrid for parameter: alpha
## Length Class Mode
## a0 73 -none- numeric
## beta 2482 dgCMatrix S4
## df 73 -none- numeric
## dim 2 -none- numeric
## lambda 73 -none- numeric
## dev.ratio 73 -none- numeric
## nulldev 1 -none- numeric
## npasses 1 -none- numeric
## jerr 1 -none- numeric
## offset 1 -none- logical
## classnames 2 -none- character
## call 5 -none- call
## nobs 1 -none- numeric
## lambdaOpt 1 -none- numeric
## xNames 34 -none- character
## problemType 1 -none- character
## tuneValue 2 data.frame list
## obsLevels 2 -none- character
## [1] "min lambda > lambdaOpt:"
## (Intercept)
## -1.271489067
## P.cor.cut.fctr(0,0.5]
## -0.854346609
## P.cor.cut.fctr(0.7,1]
## 0.590109091
## P.cosSml.cut.fctr(0.95,0.97]
## -0.053191243
## P.cosSml.cut.fctr(0.98,1]
## 0.228841035
## P.cor.cut.fctr(0,0.5]:P.mnkSml.2.scld.cut.fctr(0.86,1.14]
## -0.008777948
## P.cor.cut.fctr(0.5,0.7]:P.mnkSml.2.scld.cut.fctr(0.86,1.14]
## -0.021382281
## P.cor.cut.fctr(0.7,1]:P.mnkSml.2.scld.cut.fctr(0.86,1.14]
## 0.238513108
## P.cor.cut.fctr(0,0.5]:P.mnkSml.2.scld.cut.fctr(1.14,1.48]
## -0.034271831
## P.cor.cut.fctr(0.7,1]:P.mnkSml.2.scld.cut.fctr(1.14,1.48]
## 0.132163305
## P.cor.cut.fctr(0,0.5]:P.mnkSml.2.scld.cut.fctr(1.48,4.6]
## 0.014848457
## P.cor.cut.fctr(-1,0]:P.mnkSml.3.scld
## -3.906398749
## P.cor.cut.fctr(0.5,0.7]:P.mnkSml.3.scld
## 0.225048747
## P.cor.cut.fctr(0.7,1]:P.mnkSml.3.scld
## 0.385967848
## P.cor.cut.fctr(0.5,0.7]:P.mnkSml.3.scld.cut.fctr(0.21,0.27]
## -0.086908327
## P.cor.cut.fctr(0.7,1]:P.mnkSml.3.scld.cut.fctr(0.21,0.27]
## 0.230138195
## P.cor.cut.fctr(0.7,1]:P.mnkSml.3.scld.cut.fctr(0.27,0.35]
## 0.067562169
## P.cor.cut.fctr(0.5,0.7]:P.mnkSml.3.scld.cut.fctr(0.35,1.1]
## 0.009698182
## [1] "max lambda < lambdaOpt:"
## (Intercept)
## -1.272467062
## P.cor.cut.fctr(0,0.5]
## -0.854160644
## P.cor.cut.fctr(0.7,1]
## 0.586855955
## P.cosSml.cut.fctr(0.95,0.97]
## -0.055293489
## P.cosSml.cut.fctr(0.98,1]
## 0.228462567
## P.cor.cut.fctr(0,0.5]:P.mnkSml.2.scld.cut.fctr(0.86,1.14]
## -0.011691024
## P.cor.cut.fctr(0.5,0.7]:P.mnkSml.2.scld.cut.fctr(0.86,1.14]
## -0.024431947
## P.cor.cut.fctr(0.7,1]:P.mnkSml.2.scld.cut.fctr(0.86,1.14]
## 0.240110017
## P.cor.cut.fctr(0,0.5]:P.mnkSml.2.scld.cut.fctr(1.14,1.48]
## -0.037021117
## P.cor.cut.fctr(0.7,1]:P.mnkSml.2.scld.cut.fctr(1.14,1.48]
## 0.135423856
## P.cor.cut.fctr(0,0.5]:P.mnkSml.2.scld.cut.fctr(1.48,4.6]
## 0.021202600
## P.cor.cut.fctr(-1,0]:P.mnkSml.3.scld
## -3.953489423
## P.cor.cut.fctr(0.5,0.7]:P.mnkSml.3.scld
## 0.238494658
## P.cor.cut.fctr(0.7,1]:P.mnkSml.3.scld
## 0.397317735
## P.cor.cut.fctr(0.5,0.7]:P.mnkSml.3.scld.cut.fctr(0.21,0.27]
## -0.089076250
## P.cor.cut.fctr(0.7,1]:P.mnkSml.3.scld.cut.fctr(0.21,0.27]
## 0.234896056
## P.cor.cut.fctr(0.7,1]:P.mnkSml.3.scld.cut.fctr(0.27,0.35]
## 0.069920455
## P.cor.cut.fctr(0.5,0.7]:P.mnkSml.3.scld.cut.fctr(0.35,1.1]
## 0.007489772
## [1] "myfit_mdl: train diagnostics complete: 51.352000 secs"
## Prediction
## Reference .none left_eye_center
## .none 7868 6347
## left_eye_center 923 2594
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.5900068 0.1841772 0.5827252 0.5972588 0.8016580
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## Prediction
## Reference .none left_eye_center
## .none 7867 6346
## left_eye_center 921 2594
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.5900835 0.1843870 0.5828013 0.5973362 0.8017261
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## [1] "myfit_mdl: predict complete: 64.733000 secs"
## id
## 1 Interact.High.cor.Y##rcv#glmnet
## feats
## 1 P.cor.cut.fctr,P.cosSml.cut.fctr,P.cor.cut.fctr:P.cor.cut.fctr,P.cor.cut.fctr:P.mnkSml.3.scld,P.cor.cut.fctr:P.mnkSml.3.scld.cut.fctr,P.cor.cut.fctr:P.mnkSml.2.scld.cut.fctr
## max.nTuningRuns min.elapsedtime.everything min.elapsedtime.final
## 1 25 50.034 1.394
## max.AUCpROC.fit max.Sens.fit max.Spec.fit max.AUCROCR.fit
## 1 0.5072711 0.9960605 0.01848166 0.6898095
## opt.prob.threshold.fit max.f.score.fit max.Accuracy.fit
## 1 0.2 0.4164392 0.8020339
## max.AccuracyLower.fit max.AccuracyUpper.fit max.Kappa.fit
## 1 0.5827252 0.5972588 0.02626314
## max.AUCpROC.OOB max.Sens.OOB max.Spec.OOB max.AUCROCR.OOB
## 1 0.5056058 0.9958489 0.01536273 0.6835589
## opt.prob.threshold.OOB max.f.score.OOB max.Accuracy.OOB
## 1 0.2 0.4165395 0.5900835
## max.AccuracyLower.OOB max.AccuracyUpper.OOB max.Kappa.OOB
## 1 0.5828013 0.5973362 0.184387
## max.AccuracySD.fit max.KappaSD.fit
## 1 0.0007680048 0.005680814
## [1] "myfit_mdl: exit: 64.747000 secs"
# Low.cor.X
fit.models_0_chunk_df <- myadd_chunk(fit.models_0_chunk_df,
paste0("fit.models_0_", "Low.cor.X"), major.inc = FALSE,
label.minor = "glmnet")
## label step_major step_minor label_minor
## 5 fit.models_0_Interact.High.cor.Y 1 4 glmnet
## 6 fit.models_0_Low.cor.X 1 5 glmnet
## bgn end elapsed
## 5 118.580 183.564 64.984
## 6 183.565 NA NA
indep_vars <- subset(glb_feats_df, is.na(cor.high.X) & !nzv &
(exclude.as.feat != 1))[, "id"]
indep_vars <- myadjust_interaction_feats(indep_vars)
ret_lst <- myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst = list(
id.prefix = "Low.cor.X",
type = glb_model_type,
tune.df = glbMdlTuneParams,
trainControl.method = "repeatedcv",
trainControl.number = glb_rcv_n_folds, trainControl.repeats = glb_rcv_n_repeats,
trainControl.classProbs = glb_is_classification,
trainControl.summaryFunction = glbMdlMetricSummaryFn,
trainControl.allowParallel = glbMdlAllowParallel,
train.metric = glbMdlMetricSummary,
train.maximize = glbMdlMetricMaximize,
train.method = "glmnet")),
indep_vars = indep_vars, rsp_var = glb_rsp_var,
fit_df = glbObsFit, OOB_df = glbObsOOB)
## [1] "myfit_mdl: enter: 0.000000 secs"
## [1] "fitting model: Low.cor.X##rcv#glmnet"
## [1] " indep_vars: P.cor.cut.fctr,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.cosSml,.rnorm"
## [1] "myfit_mdl: setup complete: 0.760000 secs"
## + Fold1.Rep1: alpha=0.100, lambda=0.03844
## - Fold1.Rep1: alpha=0.100, lambda=0.03844
## + Fold1.Rep1: alpha=0.325, lambda=0.03844
## - Fold1.Rep1: alpha=0.325, lambda=0.03844
## + Fold1.Rep1: alpha=0.550, lambda=0.03844
## - Fold1.Rep1: alpha=0.550, lambda=0.03844
## + Fold1.Rep1: alpha=0.775, lambda=0.03844
## - Fold1.Rep1: alpha=0.775, lambda=0.03844
## + Fold1.Rep1: alpha=1.000, lambda=0.03844
## - Fold1.Rep1: alpha=1.000, lambda=0.03844
## + Fold2.Rep1: alpha=0.100, lambda=0.03844
## - Fold2.Rep1: alpha=0.100, lambda=0.03844
## + Fold2.Rep1: alpha=0.325, lambda=0.03844
## - Fold2.Rep1: alpha=0.325, lambda=0.03844
## + Fold2.Rep1: alpha=0.550, lambda=0.03844
## - Fold2.Rep1: alpha=0.550, lambda=0.03844
## + Fold2.Rep1: alpha=0.775, lambda=0.03844
## - Fold2.Rep1: alpha=0.775, lambda=0.03844
## + Fold2.Rep1: alpha=1.000, lambda=0.03844
## - Fold2.Rep1: alpha=1.000, lambda=0.03844
## + Fold3.Rep1: alpha=0.100, lambda=0.03844
## - Fold3.Rep1: alpha=0.100, lambda=0.03844
## + Fold3.Rep1: alpha=0.325, lambda=0.03844
## - Fold3.Rep1: alpha=0.325, lambda=0.03844
## + Fold3.Rep1: alpha=0.550, lambda=0.03844
## - Fold3.Rep1: alpha=0.550, lambda=0.03844
## + Fold3.Rep1: alpha=0.775, lambda=0.03844
## - Fold3.Rep1: alpha=0.775, lambda=0.03844
## + Fold3.Rep1: alpha=1.000, lambda=0.03844
## - Fold3.Rep1: alpha=1.000, lambda=0.03844
## + Fold1.Rep2: alpha=0.100, lambda=0.03844
## - Fold1.Rep2: alpha=0.100, lambda=0.03844
## + Fold1.Rep2: alpha=0.325, lambda=0.03844
## - Fold1.Rep2: alpha=0.325, lambda=0.03844
## + Fold1.Rep2: alpha=0.550, lambda=0.03844
## - Fold1.Rep2: alpha=0.550, lambda=0.03844
## + Fold1.Rep2: alpha=0.775, lambda=0.03844
## - Fold1.Rep2: alpha=0.775, lambda=0.03844
## + Fold1.Rep2: alpha=1.000, lambda=0.03844
## - Fold1.Rep2: alpha=1.000, lambda=0.03844
## + Fold2.Rep2: alpha=0.100, lambda=0.03844
## - Fold2.Rep2: alpha=0.100, lambda=0.03844
## + Fold2.Rep2: alpha=0.325, lambda=0.03844
## - Fold2.Rep2: alpha=0.325, lambda=0.03844
## + Fold2.Rep2: alpha=0.550, lambda=0.03844
## - Fold2.Rep2: alpha=0.550, lambda=0.03844
## + Fold2.Rep2: alpha=0.775, lambda=0.03844
## - Fold2.Rep2: alpha=0.775, lambda=0.03844
## + Fold2.Rep2: alpha=1.000, lambda=0.03844
## - Fold2.Rep2: alpha=1.000, lambda=0.03844
## + Fold3.Rep2: alpha=0.100, lambda=0.03844
## - Fold3.Rep2: alpha=0.100, lambda=0.03844
## + Fold3.Rep2: alpha=0.325, lambda=0.03844
## - Fold3.Rep2: alpha=0.325, lambda=0.03844
## + Fold3.Rep2: alpha=0.550, lambda=0.03844
## - Fold3.Rep2: alpha=0.550, lambda=0.03844
## + Fold3.Rep2: alpha=0.775, lambda=0.03844
## - Fold3.Rep2: alpha=0.775, lambda=0.03844
## + Fold3.Rep2: alpha=1.000, lambda=0.03844
## - Fold3.Rep2: alpha=1.000, lambda=0.03844
## + Fold1.Rep3: alpha=0.100, lambda=0.03844
## - Fold1.Rep3: alpha=0.100, lambda=0.03844
## + Fold1.Rep3: alpha=0.325, lambda=0.03844
## - Fold1.Rep3: alpha=0.325, lambda=0.03844
## + Fold1.Rep3: alpha=0.550, lambda=0.03844
## - Fold1.Rep3: alpha=0.550, lambda=0.03844
## + Fold1.Rep3: alpha=0.775, lambda=0.03844
## - Fold1.Rep3: alpha=0.775, lambda=0.03844
## + Fold1.Rep3: alpha=1.000, lambda=0.03844
## - Fold1.Rep3: alpha=1.000, lambda=0.03844
## + Fold2.Rep3: alpha=0.100, lambda=0.03844
## - Fold2.Rep3: alpha=0.100, lambda=0.03844
## + Fold2.Rep3: alpha=0.325, lambda=0.03844
## - Fold2.Rep3: alpha=0.325, lambda=0.03844
## + Fold2.Rep3: alpha=0.550, lambda=0.03844
## - Fold2.Rep3: alpha=0.550, lambda=0.03844
## + Fold2.Rep3: alpha=0.775, lambda=0.03844
## - Fold2.Rep3: alpha=0.775, lambda=0.03844
## + Fold2.Rep3: alpha=1.000, lambda=0.03844
## - Fold2.Rep3: alpha=1.000, lambda=0.03844
## + Fold3.Rep3: alpha=0.100, lambda=0.03844
## - Fold3.Rep3: alpha=0.100, lambda=0.03844
## + Fold3.Rep3: alpha=0.325, lambda=0.03844
## - Fold3.Rep3: alpha=0.325, lambda=0.03844
## + Fold3.Rep3: alpha=0.550, lambda=0.03844
## - Fold3.Rep3: alpha=0.550, lambda=0.03844
## + Fold3.Rep3: alpha=0.775, lambda=0.03844
## - Fold3.Rep3: alpha=0.775, lambda=0.03844
## + Fold3.Rep3: alpha=1.000, lambda=0.03844
## - Fold3.Rep3: alpha=1.000, lambda=0.03844
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 0.1, lambda = 0.0384 on full training set
## [1] "myfit_mdl: train complete: 24.007000 secs"
## Warning in myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst
## = list(id.prefix = "Low.cor.X", : model's bestTune found at an extreme of
## tuneGrid for parameter: alpha
## Warning in myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst
## = list(id.prefix = "Low.cor.X", : model's bestTune found at an extreme of
## tuneGrid for parameter: lambda
## Length Class Mode
## a0 77 -none- numeric
## beta 693 dgCMatrix S4
## df 77 -none- numeric
## dim 2 -none- numeric
## lambda 77 -none- numeric
## dev.ratio 77 -none- numeric
## nulldev 1 -none- numeric
## npasses 1 -none- numeric
## jerr 1 -none- numeric
## offset 1 -none- logical
## classnames 2 -none- character
## call 5 -none- call
## nobs 1 -none- numeric
## lambdaOpt 1 -none- numeric
## xNames 9 -none- character
## problemType 1 -none- character
## tuneValue 2 data.frame list
## obsLevels 2 -none- character
## [1] "min lambda > lambdaOpt:"
## (Intercept) P.cor.cut.fctr(0,0.5]
## -1.56176617 -0.52554046
## P.cor.cut.fctr(0.5,0.7] P.cor.cut.fctr(0.7,1]
## 0.20519120 0.91235523
## P.cosSml.cut.fctr(0.95,0.97] P.cosSml.cut.fctr(0.98,1]
## -0.04124425 0.21804779
## P.mnkSml.3.scld
## 0.28454374
## [1] "max lambda < lambdaOpt:"
## (Intercept) P.cor.cut.fctr(0,0.5]
## -1.57125855 -0.52852129
## P.cor.cut.fctr(0.5,0.7] P.cor.cut.fctr(0.7,1]
## 0.21700432 0.93260452
## P.cosSml.cut.fctr(0.95,0.97] P.cosSml.cut.fctr(0.98,1]
## -0.04414375 0.21963640
## P.mnkSml.3.scld
## 0.28881128
## [1] "myfit_mdl: train diagnostics complete: 24.582000 secs"
## Prediction
## Reference .none left_eye_center
## .none 7868 6347
## left_eye_center 923 2594
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.5900068 0.1841772 0.5827252 0.5972588 0.8016580
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## Prediction
## Reference .none left_eye_center
## .none 7867 6346
## left_eye_center 921 2594
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.5900835 0.1843870 0.5828013 0.5973362 0.8017261
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## [1] "myfit_mdl: predict complete: 45.714000 secs"
## id
## 1 Low.cor.X##rcv#glmnet
## feats
## 1 P.cor.cut.fctr,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.cosSml,.rnorm
## max.nTuningRuns min.elapsedtime.everything min.elapsedtime.final
## 1 25 23.222 0.515
## max.AUCpROC.fit max.Sens.fit max.Spec.fit max.AUCROCR.fit
## 1 0.5 1 0 0.6847781
## opt.prob.threshold.fit max.f.score.fit max.Accuracy.fit
## 1 0.2 0.4164392 0.801658
## max.AccuracyLower.fit max.AccuracyUpper.fit max.Kappa.fit
## 1 0.5827252 0.5972588 0
## max.AUCpROC.OOB max.Sens.OOB max.Spec.OOB max.AUCROCR.OOB
## 1 0.5 1 0 0.6821994
## opt.prob.threshold.OOB max.f.score.OOB max.Accuracy.OOB
## 1 0.2 0.4165395 0.5900835
## max.AccuracyLower.OOB max.AccuracyUpper.OOB max.Kappa.OOB
## 1 0.5828013 0.5973362 0.184387
## max.AccuracySD.fit max.KappaSD.fit
## 1 6.985535e-05 0
## [1] "myfit_mdl: exit: 45.728000 secs"
fit.models_0_chunk_df <-
myadd_chunk(fit.models_0_chunk_df, "fit.models_0_end", major.inc = FALSE,
label.minor = "teardown")
## label step_major step_minor label_minor bgn end
## 6 fit.models_0_Low.cor.X 1 5 glmnet 183.565 229.91
## 7 fit.models_0_end 1 6 teardown 229.910 NA
## elapsed
## 6 46.345
## 7 NA
rm(ret_lst)
glb_chunks_df <- myadd_chunk(glb_chunks_df, "fit.models", major.inc = FALSE)
## label step_major step_minor label_minor bgn end elapsed
## 16 fit.models 8 0 0 79.093 229.923 150.831
## 17 fit.models 8 1 1 229.924 NA NA
fit.models_1_chunk_df <- myadd_chunk(NULL, "fit.models_1_bgn", label.minor = "setup")
## label step_major step_minor label_minor bgn end elapsed
## 1 fit.models_1_bgn 1 0 setup 235.378 NA NA
# refactor code for outliers / ensure all model runs exclude outliers in this chunk ???
#stop(here"); glb2Sav(); all.equal(glb_models_df, sav_models_df)
topindep_var <- NULL; interact_vars <- NULL;
for (mdl_id_pfx in names(glbMdlFamilies)) {
fit.models_1_chunk_df <-
myadd_chunk(fit.models_1_chunk_df, paste0("fit.models_1_", mdl_id_pfx),
major.inc = FALSE, label.minor = "setup")
indep_vars <- NULL;
if (grepl("\\.Interact", mdl_id_pfx)) {
if (is.null(topindep_var) && is.null(interact_vars)) {
# select best glmnet model upto now
dsp_models_df <- orderBy(model_sel_frmla <- get_model_sel_frmla(),
glb_models_df)
dsp_models_df <- subset(dsp_models_df,
grepl(".glmnet", id, fixed = TRUE))
bst_mdl_id <- dsp_models_df$id[1]
mdl_id_pfx <-
paste(c(head(unlist(strsplit(bst_mdl_id, "[.]")), -1), "Interact"),
collapse=".")
# select important features
if (is.null(bst_featsimp_df <-
myget_feats_importance(glb_models_lst[[bst_mdl_id]]))) {
warning("Base model for RFE.Interact: ", bst_mdl_id,
" has no important features")
next
}
topindep_ix <- 1
while (is.null(topindep_var) && (topindep_ix <= nrow(bst_featsimp_df))) {
topindep_var <- row.names(bst_featsimp_df)[topindep_ix]
if (grepl(".fctr", topindep_var, fixed=TRUE))
topindep_var <-
paste0(unlist(strsplit(topindep_var, ".fctr"))[1], ".fctr")
if (topindep_var %in% names(glbFeatsInteractionOnly)) {
topindep_var <- NULL; topindep_ix <- topindep_ix + 1
} else break
}
# select features with importance > max(10, importance of .rnorm), excluding the top feature
# combine factor dummy features into the single parent factor feature
if (length(pos_rnorm <-
grep(".rnorm", row.names(bst_featsimp_df), fixed=TRUE)) > 0)
imp_rnorm <- bst_featsimp_df[pos_rnorm, 1] else
imp_rnorm <- NA
imp_cutoff <- max(10, imp_rnorm, na.rm=TRUE)
interact_vars <-
tail(row.names(subset(bst_featsimp_df,
imp > imp_cutoff)), -1)
if (length(interact_vars) > 0) {
interact_vars <-
myadjust_interaction_feats(myextract_actual_feats(interact_vars))
interact_vars <-
interact_vars[!grepl(topindep_var, interact_vars, fixed=TRUE)]
}
### bid0_sp only
# interact_vars <- c(
# "biddable", "D.ratio.sum.TfIdf.wrds.n", "D.TfIdf.sum.stem.stop.Ratio", "D.sum.TfIdf",
# "D.TfIdf.sum.post.stop", "D.TfIdf.sum.post.stem", "D.ratio.wrds.stop.n.wrds.n", "D.chrs.uppr.n.log",
# "D.chrs.n.log", "color.fctr"
# # , "condition.fctr", "prdl.my.descr.fctr"
# )
# interact_vars <- setdiff(interact_vars, c("startprice.dgt2.is9", "color.fctr"))
###
indep_vars <- myextract_actual_feats(row.names(bst_featsimp_df))
indep_vars <- setdiff(indep_vars, topindep_var)
if (length(interact_vars) > 0) {
indep_vars <-
setdiff(indep_vars, myextract_actual_feats(interact_vars))
indep_vars <- c(indep_vars,
paste(topindep_var, setdiff(interact_vars, topindep_var),
sep = "*"))
} else indep_vars <- union(indep_vars, topindep_var)
}
}
if (is.null(indep_vars))
indep_vars <- glb_mdl_feats_lst[[mdl_id_pfx]]
if (is.null(indep_vars) && grepl("RFE\\.", mdl_id_pfx))
indep_vars <- myextract_actual_feats(predictors(rfe_fit_results))
if (is.null(indep_vars))
indep_vars <- subset(glb_feats_df, !nzv & (exclude.as.feat != 1))[, "id"]
if ((length(indep_vars) == 1) && (grepl("^%<d-%", indep_vars))) {
indep_vars <-
eval(parse(text = str_trim(unlist(strsplit(indep_vars, "%<d-%"))[2])))
}
indep_vars <- myadjust_interaction_feats(indep_vars)
if (grepl("\\.Interact", mdl_id_pfx)) {
# if (method != tail(unlist(strsplit(bst_mdl_id, "[.]")), 1)) next
if (is.null(glbMdlFamilies[[mdl_id_pfx]])) {
if (!is.null(glbMdlFamilies[["Best.Interact"]]))
glbMdlFamilies[[mdl_id_pfx]] <-
glbMdlFamilies[["Best.Interact"]]
}
}
if (!is.null(glbObsFitOutliers[[mdl_id_pfx]])) {
fitobs_df <- glbObsFit[!(glbObsFit[, glbFeatsId] %in%
glbObsFitOutliers[[mdl_id_pfx]]), ]
print(sprintf("Outliers removed: %d", nrow(glbObsFit) - nrow(fitobs_df)))
print(setdiff(glbObsFit[, glbFeatsId], fitobs_df[, glbFeatsId]))
} else fitobs_df <- glbObsFit
if (is.null(glbMdlFamilies[[mdl_id_pfx]]))
mdl_methods <- glbMdlMethods else
mdl_methods <- glbMdlFamilies[[mdl_id_pfx]]
for (method in mdl_methods) {
if (method %in% c("rpart", "rf")) {
# rpart: including .rnorm wrecks the tree
# rf: skip the scenario w/ .rnorm for speed
indep_vars <- setdiff(indep_vars, c(".rnorm"))
#mdl_id <- paste0(mdl_id_pfx, ".no.rnorm")
}
fit.models_1_chunk_df <- myadd_chunk(fit.models_1_chunk_df,
paste0("fit.models_1_", mdl_id_pfx), major.inc = FALSE,
label.minor = method)
ret_lst <-
myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst = list(
id.prefix = mdl_id_pfx,
type = glb_model_type,
tune.df = glbMdlTuneParams,
trainControl.method = "repeatedcv", # or "none" if nominalWorkflow is crashing
trainControl.number = glb_rcv_n_folds,
trainControl.repeats = glb_rcv_n_repeats,
trainControl.classProbs = glb_is_classification,
trainControl.summaryFunction = glbMdlMetricSummaryFn,
trainControl.allowParallel = glbMdlAllowParallel,
train.metric = glbMdlMetricSummary,
train.maximize = glbMdlMetricMaximize,
train.method = method)),
indep_vars = indep_vars, rsp_var = glb_rsp_var,
fit_df = fitobs_df, OOB_df = glbObsOOB)
# ntv_mdl <- glmnet(x = as.matrix(
# fitobs_df[, indep_vars]),
# y = as.factor(as.character(
# fitobs_df[, glb_rsp_var])),
# family = "multinomial")
# bgn = 1; end = 100;
# ntv_mdl <- glmnet(x = as.matrix(
# subset(fitobs_df, pop.fctr != "crypto")[bgn:end, indep_vars]),
# y = as.factor(as.character(
# subset(fitobs_df, pop.fctr != "crypto")[bgn:end, glb_rsp_var])),
# family = "multinomial")
}
}
## label step_major step_minor label_minor bgn end
## 1 fit.models_1_bgn 1 0 setup 235.378 235.388
## 2 fit.models_1_All.X 1 1 setup 235.388 NA
## elapsed
## 1 0.01
## 2 NA
## label step_major step_minor label_minor bgn end
## 2 fit.models_1_All.X 1 1 setup 235.388 235.394
## 3 fit.models_1_All.X 1 2 glmnet 235.394 NA
## elapsed
## 2 0.006
## 3 NA
## [1] "myfit_mdl: enter: 0.000000 secs"
## [1] "fitting model: All.X##rcv#glmnet"
## [1] " indep_vars: P.cor.cut.fctr,P.cor,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.mnkSml.2.scld,P.mnkSml.1.scld,P.cosSml,P.mnkSml.3.scld.cut.fctr,P.mnkSml.2.scld.cut.fctr,P.mnkSml.1.scld.cut.fctr,.rnorm"
## [1] "myfit_mdl: setup complete: 0.702000 secs"
## + Fold1.Rep1: alpha=0.100, lambda=0.04376
## - Fold1.Rep1: alpha=0.100, lambda=0.04376
## ... (analogous +/- pairs for alpha = 0.100, 0.325, 0.550, 0.775, 1.000
## across Fold1-Fold3, Rep1-Rep3 elided) ...
## + Fold3.Rep3: alpha=1.000, lambda=0.04376
## - Fold3.Rep3: alpha=1.000, lambda=0.04376
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 0.325, lambda = 9.43e-05 on full training set
## [1] "myfit_mdl: train complete: 42.779000 secs"
## Warning in myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst
## = list(id.prefix = mdl_id_pfx, : model's bestTune found at an extreme of
## tuneGrid for parameter: lambda
## Length Class Mode
## a0 81 -none- numeric
## beta 1701 dgCMatrix S4
## df 81 -none- numeric
## dim 2 -none- numeric
## lambda 81 -none- numeric
## dev.ratio 81 -none- numeric
## nulldev 1 -none- numeric
## npasses 1 -none- numeric
## jerr 1 -none- numeric
## offset 1 -none- logical
## classnames 2 -none- character
## call 5 -none- call
## nobs 1 -none- numeric
## lambdaOpt 1 -none- numeric
## xNames 21 -none- character
## problemType 1 -none- character
## tuneValue 2 data.frame list
## obsLevels 2 -none- character
## [1] "min lambda > lambdaOpt:"
## (Intercept) .rnorm
## 0.99526771 -0.02090746
## P.cor P.cor.cut.fctr(0,0.5]
## 2.64474015 -0.84028965
## P.cor.cut.fctr(0.5,0.7] P.cor.cut.fctr(0.7,1]
## -0.54462113 -0.12752847
## P.cosSml P.cosSml.cut.fctr(0.95,0.97]
## -3.63676497 0.04849048
## P.cosSml.cut.fctr(0.97,0.98] P.cosSml.cut.fctr(0.98,1]
## 0.16031813 0.37667232
## P.mnkSml.1.scld P.mnkSml.1.scld.cut.fctr(0.47,0.64]
## -0.26783206 0.10887413
## P.mnkSml.1.scld.cut.fctr(0.64,0.87] P.mnkSml.1.scld.cut.fctr(0.87,2.9]
## 0.19672374 0.41167020
## P.mnkSml.2.scld P.mnkSml.2.scld.cut.fctr(0.86,1.14]
## 0.11127701 -0.09447011
## P.mnkSml.2.scld.cut.fctr(1.14,1.48] P.mnkSml.2.scld.cut.fctr(1.48,4.6]
## -0.18315876 -0.37428129
## P.mnkSml.3.scld P.mnkSml.3.scld.cut.fctr(0.21,0.27]
## 0.55362273 0.03032794
## P.mnkSml.3.scld.cut.fctr(0.27,0.35] P.mnkSml.3.scld.cut.fctr(0.35,1.1]
## 0.04257503 -0.01830337
## [1] "max lambda < lambdaOpt:"
## [1] "Feats mismatch between coefs_left & right:"
## [1] "(Intercept)"
## [2] ".rnorm"
## [3] "P.cor"
## [4] "P.cor.cut.fctr(0,0.5]"
## [5] "P.cor.cut.fctr(0.5,0.7]"
## [6] "P.cor.cut.fctr(0.7,1]"
## [7] "P.cosSml"
## [8] "P.cosSml.cut.fctr(0.95,0.97]"
## [9] "P.cosSml.cut.fctr(0.97,0.98]"
## [10] "P.cosSml.cut.fctr(0.98,1]"
## [11] "P.mnkSml.1.scld"
## [12] "P.mnkSml.1.scld.cut.fctr(0.47,0.64]"
## [13] "P.mnkSml.1.scld.cut.fctr(0.64,0.87]"
## [14] "P.mnkSml.1.scld.cut.fctr(0.87,2.9]"
## [15] "P.mnkSml.2.scld"
## [16] "P.mnkSml.2.scld.cut.fctr(0.86,1.14]"
## [17] "P.mnkSml.2.scld.cut.fctr(1.14,1.48]"
## [18] "P.mnkSml.2.scld.cut.fctr(1.48,4.6]"
## [19] "P.mnkSml.3.scld"
## [20] "P.mnkSml.3.scld.cut.fctr(0.21,0.27]"
## [21] "P.mnkSml.3.scld.cut.fctr(0.27,0.35]"
## [22] "P.mnkSml.3.scld.cut.fctr(0.35,1.1]"
## [1] "myfit_mdl: train diagnostics complete: 43.443000 secs"
## Prediction
## Reference .none left_eye_center
## .none 8635 5580
## left_eye_center 1067 2450
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.6251410 0.2050591 0.6179676 0.6322733 0.8016580
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## Prediction
## Reference .none left_eye_center
## .none 8666 5547
## left_eye_center 1099 2416
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.6251128 0.2012297 0.6179385 0.6322460 0.8017261
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## [1] "myfit_mdl: predict complete: 65.026000 secs"
## id
## 1 All.X##rcv#glmnet
## feats
## 1 P.cor.cut.fctr,P.cor,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.mnkSml.2.scld,P.mnkSml.1.scld,P.cosSml,P.mnkSml.3.scld.cut.fctr,P.mnkSml.2.scld.cut.fctr,P.mnkSml.1.scld.cut.fctr,.rnorm
## max.nTuningRuns min.elapsedtime.everything min.elapsedtime.final
## 1 25 42.034 1.007
## max.AUCpROC.fit max.Sens.fit max.Spec.fit max.AUCROCR.fit
## 1 0.5110832 0.9943018 0.02786466 0.7013573
## opt.prob.threshold.fit max.f.score.fit max.Accuracy.fit
## 1 0.2 0.4243526 0.8020528
## max.AccuracyLower.fit max.AccuracyUpper.fit max.Kappa.fit
## 1 0.6179676 0.6322733 0.03358448
## max.AUCpROC.OOB max.Sens.OOB max.Spec.OOB max.AUCROCR.OOB
## 1 0.5158859 0.9953564 0.03641536 0.7003196
## opt.prob.threshold.OOB max.f.score.OOB max.Accuracy.OOB
## 1 0.2 0.4209793 0.6251128
## max.AccuracyLower.OOB max.AccuracyUpper.OOB max.Kappa.OOB
## 1 0.6179385 0.632246 0.2012297
## max.AccuracySD.fit max.KappaSD.fit
## 1 0.0009535491 0.01300328
## [1] "myfit_mdl: exit: 65.040000 secs"
## label step_major step_minor label_minor bgn end
## 3 fit.models_1_All.X 1 2 glmnet 235.394 300.439
## 4 fit.models_1_Final 1 3 setup 300.439 NA
## elapsed
## 3 65.045
## 4 NA
## label step_major step_minor label_minor bgn end
## 4 fit.models_1_Final 1 3 setup 300.439 300.445
## 5 fit.models_1_Final 1 4 glmnet 300.446 NA
## elapsed
## 4 0.006
## 5 NA
## [1] "myfit_mdl: enter: 0.000000 secs"
## [1] "fitting model: Final##rcv#glmnet"
## [1] " indep_vars: P.cor.cut.fctr,P.cor,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.mnkSml.2.scld,P.mnkSml.1.scld,P.cosSml,P.mnkSml.3.scld.cut.fctr,P.mnkSml.2.scld.cut.fctr,P.mnkSml.1.scld.cut.fctr,.rnorm"
## [1] "myfit_mdl: setup complete: 0.691000 secs"
## + Fold1.Rep1: alpha=0.100, lambda=0.04376
## - Fold1.Rep1: alpha=0.100, lambda=0.04376
## ... (analogous +/- pairs for alpha = 0.100, 0.325, 0.550, 0.775, 1.000
## across Fold1-Fold3, Rep1-Rep3 elided) ...
## + Fold3.Rep3: alpha=1.000, lambda=0.04376
## - Fold3.Rep3: alpha=1.000, lambda=0.04376
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 0.325, lambda = 9.43e-05 on full training set
## [1] "myfit_mdl: train complete: 42.828000 secs"
## Warning in myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst
## = list(id.prefix = mdl_id_pfx, : model's bestTune found at an extreme of
## tuneGrid for parameter: lambda
## Length Class Mode
## a0 81 -none- numeric
## beta 1701 dgCMatrix S4
## df 81 -none- numeric
## dim 2 -none- numeric
## lambda 81 -none- numeric
## dev.ratio 81 -none- numeric
## nulldev 1 -none- numeric
## npasses 1 -none- numeric
## jerr 1 -none- numeric
## offset 1 -none- logical
## classnames 2 -none- character
## call 5 -none- call
## nobs 1 -none- numeric
## lambdaOpt 1 -none- numeric
## xNames 21 -none- character
## problemType 1 -none- character
## tuneValue 2 data.frame list
## obsLevels 2 -none- character
## [1] "min lambda > lambdaOpt:"
## (Intercept) .rnorm
## 0.99526771 -0.02090746
## P.cor P.cor.cut.fctr(0,0.5]
## 2.64474015 -0.84028965
## P.cor.cut.fctr(0.5,0.7] P.cor.cut.fctr(0.7,1]
## -0.54462113 -0.12752847
## P.cosSml P.cosSml.cut.fctr(0.95,0.97]
## -3.63676497 0.04849048
## P.cosSml.cut.fctr(0.97,0.98] P.cosSml.cut.fctr(0.98,1]
## 0.16031813 0.37667232
## P.mnkSml.1.scld P.mnkSml.1.scld.cut.fctr(0.47,0.64]
## -0.26783206 0.10887413
## P.mnkSml.1.scld.cut.fctr(0.64,0.87] P.mnkSml.1.scld.cut.fctr(0.87,2.9]
## 0.19672374 0.41167020
## P.mnkSml.2.scld P.mnkSml.2.scld.cut.fctr(0.86,1.14]
## 0.11127701 -0.09447011
## P.mnkSml.2.scld.cut.fctr(1.14,1.48] P.mnkSml.2.scld.cut.fctr(1.48,4.6]
## -0.18315876 -0.37428129
## P.mnkSml.3.scld P.mnkSml.3.scld.cut.fctr(0.21,0.27]
## 0.55362273 0.03032794
## P.mnkSml.3.scld.cut.fctr(0.27,0.35] P.mnkSml.3.scld.cut.fctr(0.35,1.1]
## 0.04257503 -0.01830337
## [1] "max lambda < lambdaOpt:"
## [1] "Feats mismatch between coefs_left & right:"
## [1] "(Intercept)"
## [2] ".rnorm"
## [3] "P.cor"
## [4] "P.cor.cut.fctr(0,0.5]"
## [5] "P.cor.cut.fctr(0.5,0.7]"
## [6] "P.cor.cut.fctr(0.7,1]"
## [7] "P.cosSml"
## [8] "P.cosSml.cut.fctr(0.95,0.97]"
## [9] "P.cosSml.cut.fctr(0.97,0.98]"
## [10] "P.cosSml.cut.fctr(0.98,1]"
## [11] "P.mnkSml.1.scld"
## [12] "P.mnkSml.1.scld.cut.fctr(0.47,0.64]"
## [13] "P.mnkSml.1.scld.cut.fctr(0.64,0.87]"
## [14] "P.mnkSml.1.scld.cut.fctr(0.87,2.9]"
## [15] "P.mnkSml.2.scld"
## [16] "P.mnkSml.2.scld.cut.fctr(0.86,1.14]"
## [17] "P.mnkSml.2.scld.cut.fctr(1.14,1.48]"
## [18] "P.mnkSml.2.scld.cut.fctr(1.48,4.6]"
## [19] "P.mnkSml.3.scld"
## [20] "P.mnkSml.3.scld.cut.fctr(0.21,0.27]"
## [21] "P.mnkSml.3.scld.cut.fctr(0.27,0.35]"
## [22] "P.mnkSml.3.scld.cut.fctr(0.35,1.1]"
## [1] "myfit_mdl: train diagnostics complete: 43.988000 secs"
## Prediction
## Reference .none left_eye_center
## .none 8635 5580
## left_eye_center 1067 2450
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.6251410 0.2050591 0.6179676 0.6322733 0.8016580
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## Prediction
## Reference .none left_eye_center
## .none 8666 5547
## left_eye_center 1099 2416
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.6251128 0.2012297 0.6179385 0.6322460 0.8017261
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## [1] "myfit_mdl: predict complete: 66.657000 secs"
## id
## 1 Final##rcv#glmnet
## feats
## 1 P.cor.cut.fctr,P.cor,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.mnkSml.2.scld,P.mnkSml.1.scld,P.cosSml,P.mnkSml.3.scld.cut.fctr,P.mnkSml.2.scld.cut.fctr,P.mnkSml.1.scld.cut.fctr,.rnorm
## max.nTuningRuns min.elapsedtime.everything min.elapsedtime.final
## 1 25 42.097 1.009
## max.AUCpROC.fit max.Sens.fit max.Spec.fit max.AUCROCR.fit
## 1 0.5110832 0.9943018 0.02786466 0.7013573
## opt.prob.threshold.fit max.f.score.fit max.Accuracy.fit
## 1 0.2 0.4243526 0.8020528
## max.AccuracyLower.fit max.AccuracyUpper.fit max.Kappa.fit
## 1 0.6179676 0.6322733 0.03358448
## max.AUCpROC.OOB max.Sens.OOB max.Spec.OOB max.AUCROCR.OOB
## 1 0.5158859 0.9953564 0.03641536 0.7003196
## opt.prob.threshold.OOB max.f.score.OOB max.Accuracy.OOB
## 1 0.2 0.4209793 0.6251128
## max.AccuracyLower.OOB max.AccuracyUpper.OOB max.Kappa.OOB
## 1 0.6179385 0.632246 0.2012297
## max.AccuracySD.fit max.KappaSD.fit
## 1 0.0009535491 0.01300328
## [1] "myfit_mdl: exit: 66.674000 secs"
## label step_major step_minor label_minor bgn end
## 5 fit.models_1_Final 1 4 glmnet 300.446 367.125
## 6 fit.models_1_Final 1 5 glm 367.126 NA
## elapsed
## 5 66.679
## 6 NA
## [1] "myfit_mdl: enter: 0.001000 secs"
## [1] "fitting model: Final##rcv#glm"
## [1] " indep_vars: P.cor.cut.fctr,P.cor,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.mnkSml.2.scld,P.mnkSml.1.scld,P.cosSml,P.mnkSml.3.scld.cut.fctr,P.mnkSml.2.scld.cut.fctr,P.mnkSml.1.scld.cut.fctr,.rnorm"
## [1] "myfit_mdl: setup complete: 1.290000 secs"
## + Fold1.Rep1: parameter=none
## - Fold1.Rep1: parameter=none
## + Fold2.Rep1: parameter=none
## - Fold2.Rep1: parameter=none
## + Fold3.Rep1: parameter=none
## - Fold3.Rep1: parameter=none
## + Fold1.Rep2: parameter=none
## - Fold1.Rep2: parameter=none
## + Fold2.Rep2: parameter=none
## - Fold2.Rep2: parameter=none
## + Fold3.Rep2: parameter=none
## - Fold3.Rep2: parameter=none
## + Fold1.Rep3: parameter=none
## - Fold1.Rep3: parameter=none
## + Fold2.Rep3: parameter=none
## - Fold2.Rep3: parameter=none
## + Fold3.Rep3: parameter=none
## - Fold3.Rep3: parameter=none
## Aggregating results
## Fitting final model on full training set
## [1] "myfit_mdl: train complete: 4.841000 secs"
##
## Call:
## NULL
##
## Deviance Residuals:
## Min 1Q Median 3Q Max
## -1.3507 -0.7099 -0.5057 -0.3605 2.7467
##
## Coefficients:
## Estimate Std. Error z value
## (Intercept) 1.259e+00 7.477e-01 1.683
## .rnorm -2.137e-02 1.976e-02 -1.082
## P.cor 2.850e+00 2.703e-01 10.544
## `P.cor.cut.fctr(0,0.5]` -1.030e+00 2.223e-01 -4.632
## `P.cor.cut.fctr(0.5,0.7]` -7.772e-01 2.612e-01 -2.976
## `P.cor.cut.fctr(0.7,1]` -3.771e-01 2.933e-01 -1.286
## P.cosSml -3.797e+00 8.179e-01 -4.642
## `P.cosSml.cut.fctr(0.95,0.97]` 5.781e-02 7.588e-02 0.762
## `P.cosSml.cut.fctr(0.97,0.98]` 1.657e-01 8.556e-02 1.936
## `P.cosSml.cut.fctr(0.98,1]` 3.725e-01 8.890e-02 4.190
## P.mnkSml.1.scld -5.005e+00 1.518e+00 -3.296
## `P.mnkSml.1.scld.cut.fctr(0.47,0.64]` 1.911e-01 1.318e-01 1.451
## `P.mnkSml.1.scld.cut.fctr(0.64,0.87]` 3.344e-01 1.745e-01 1.917
## `P.mnkSml.1.scld.cut.fctr(0.87,2.9]` 5.981e-01 2.218e-01 2.696
## P.mnkSml.2.scld 6.589e+00 2.080e+00 3.167
## `P.mnkSml.2.scld.cut.fctr(0.86,1.14]` -1.962e-01 1.613e-01 -1.216
## `P.mnkSml.2.scld.cut.fctr(1.14,1.48]` -3.403e-01 2.090e-01 -1.629
## `P.mnkSml.2.scld.cut.fctr(1.48,4.6]` -5.819e-01 2.633e-01 -2.210
## P.mnkSml.3.scld -1.491e+01 5.145e+00 -2.899
## `P.mnkSml.3.scld.cut.fctr(0.21,0.27]` 5.067e-02 1.203e-01 0.421
## `P.mnkSml.3.scld.cut.fctr(0.27,0.35]` 5.906e-02 1.595e-01 0.370
## `P.mnkSml.3.scld.cut.fctr(0.35,1.1]` 5.867e-05 2.090e-01 0.000
## Pr(>|z|)
## (Intercept) 0.09232 .
## .rnorm 0.27931
## P.cor < 2e-16 ***
## `P.cor.cut.fctr(0,0.5]` 3.62e-06 ***
## `P.cor.cut.fctr(0.5,0.7]` 0.00292 **
## `P.cor.cut.fctr(0.7,1]` 0.19855
## P.cosSml 3.45e-06 ***
## `P.cosSml.cut.fctr(0.95,0.97]` 0.44619
## `P.cosSml.cut.fctr(0.97,0.98]` 0.05284 .
## `P.cosSml.cut.fctr(0.98,1]` 2.79e-05 ***
## P.mnkSml.1.scld 0.00098 ***
## `P.mnkSml.1.scld.cut.fctr(0.47,0.64]` 0.14686
## `P.mnkSml.1.scld.cut.fctr(0.64,0.87]` 0.05529 .
## `P.mnkSml.1.scld.cut.fctr(0.87,2.9]` 0.00701 **
## P.mnkSml.2.scld 0.00154 **
## `P.mnkSml.2.scld.cut.fctr(0.86,1.14]` 0.22382
## `P.mnkSml.2.scld.cut.fctr(1.14,1.48]` 0.10338
## `P.mnkSml.2.scld.cut.fctr(1.48,4.6]` 0.02710 *
## P.mnkSml.3.scld 0.00374 **
## `P.mnkSml.3.scld.cut.fctr(0.21,0.27]` 0.67368
## `P.mnkSml.3.scld.cut.fctr(0.27,0.35]` 0.71122
## `P.mnkSml.3.scld.cut.fctr(0.35,1.1]` 0.99978
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## (Dispersion parameter for binomial family taken to be 1)
##
## Null deviance: 17664 on 17731 degrees of freedom
## Residual deviance: 16203 on 17710 degrees of freedom
## AIC: 16247
##
## Number of Fisher Scoring iterations: 5
##
## [1] "myfit_mdl: train diagnostics complete: 8.179000 secs"
## Prediction
## Reference .none left_eye_center
## .none 8734 5481
## left_eye_center 1089 2428
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.6294834 0.2073476 0.6223262 0.6365982 0.8016580
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## Prediction
## Reference .none left_eye_center
## .none 8812 5401
## left_eye_center 1133 2382
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.6314305 0.2043030 0.6242799 0.6385380 0.8017261
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## [1] "myfit_mdl: predict complete: 30.135000 secs"
## id
## 1 Final##rcv#glm
## feats
## 1 P.cor.cut.fctr,P.cor,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.mnkSml.2.scld,P.mnkSml.1.scld,P.cosSml,P.mnkSml.3.scld.cut.fctr,P.mnkSml.2.scld.cut.fctr,P.mnkSml.1.scld.cut.fctr,.rnorm
## max.nTuningRuns min.elapsedtime.everything min.elapsedtime.final
## 1 1 3.516 0.257
## max.AUCpROC.fit max.Sens.fit max.Spec.fit max.AUCROCR.fit
## 1 0.5113383 0.9931059 0.02957066 0.7019435
## opt.prob.threshold.fit max.f.score.fit max.Accuracy.fit
## 1 0.2 0.4249956 0.8018272
## max.AccuracyLower.fit max.AccuracyUpper.fit max.Kappa.fit
## 1 0.6223262 0.6365982 0.03797967
## max.AUCpROC.OOB max.Sens.OOB max.Spec.OOB max.AUCROCR.OOB
## 1 0.5176724 0.9938085 0.04153627 0.7008478
## opt.prob.threshold.OOB max.f.score.OOB max.Accuracy.OOB
## 1 0.2 0.4216676 0.6314305
## max.AccuracyLower.OOB max.AccuracyUpper.OOB max.Kappa.OOB
## 1 0.6242799 0.638538 0.204303
## max.AccuracySD.fit max.KappaSD.fit
## 1 0.001338795 0.009045356
## [1] "myfit_mdl: exit: 30.148000 secs"
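# The "bestTune found at an extreme of tuneGrid" warnings above hint that the
# optimal lambda may lie beyond the searched grid. A wider grid (illustrative
# values, not the pipeline's glbMdlTuneParams) could be passed to caret:
wider_glmnet_grid <- expand.grid(alpha  = c(0.100, 0.325, 0.550, 0.775, 1.000),
                                 lambda = 10^seq(-6, -1, length.out = 10))
# ... then supply tuneGrid = wider_glmnet_grid in the train() call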
# Check if other preProcess methods improve model performance
fit.models_1_chunk_df <-
myadd_chunk(fit.models_1_chunk_df, "fit.models_1_preProc", major.inc = FALSE,
label.minor = "preProc")
## label step_major step_minor label_minor bgn end
## 6 fit.models_1_Final 1 5 glm 367.126 397.8
## 7 fit.models_1_preProc 1 6 preProc 397.801 NA
## elapsed
## 6 30.674
## 7 NA
mdl_id <- orderBy(get_model_sel_frmla(), glb_models_df)[1, "id"]
indep_vars_vctr <- trim(unlist(strsplit(glb_models_df[glb_models_df$id == mdl_id,
"feats"], "[,]")))
method <- tail(unlist(strsplit(mdl_id, "[.]")), 1)
mdl_id_pfx <- paste0(head(unlist(strsplit(mdl_id, "[.]")), -1), collapse = ".")
if (!is.null(glbObsFitOutliers[[mdl_id_pfx]])) {
fitobs_df <- glbObsFit[!(glbObsFit[, glbFeatsId] %in%
glbObsFitOutliers[[mdl_id_pfx]]), ]
print(sprintf("Outliers removed: %d", nrow(glbObsFit) - nrow(fitobs_df)))
print(setdiff(glbObsFit[, glbFeatsId], fitobs_df[, glbFeatsId]))
} else fitobs_df <- glbObsFit
for (prePr in glb_preproc_methods) {
# The operations are applied in this order:
# Box-Cox/Yeo-Johnson transformation, centering, scaling, range, imputation, PCA, ICA then spatial sign.
ret_lst <- myfit_mdl(mdl_specs_lst=myinit_mdl_specs_lst(mdl_specs_lst=list(
id.prefix=mdl_id_pfx,
type=glb_model_type, tune.df=glbMdlTuneParams,
trainControl.method="repeatedcv",
trainControl.number=glb_rcv_n_folds,
trainControl.repeats=glb_rcv_n_repeats,
trainControl.classProbs = glb_is_classification,
trainControl.summaryFunction = glbMdlMetricSummaryFn,
train.metric = glbMdlMetricSummary,
train.maximize = glbMdlMetricMaximize,
train.method=method, train.preProcess=prePr)),
indep_vars=indep_vars_vctr, rsp_var=glb_rsp_var,
fit_df=fitobs_df, OOB_df=glbObsOOB)
}
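# Quick illustration of the fixed preProcess ordering noted above: with toy
# data (not glbObsFit), centering/scaling are applied before PCA whenever
# both are requested, regardless of the order given in `method`.
toy_pp_df <- data.frame(a = rnorm(20, mean = 5, sd = 2),
                        b = rnorm(20, mean = -3, sd = 4))
toy_pp <- preProcess(toy_pp_df, method = c("pca", "center", "scale"), pcaComp = 1)
head(predict(toy_pp, toy_pp_df))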
# If (All|RFE).X.glm is less accurate than Low.Cor.X.glm
# check NA coefficients & filter appropriate terms in indep_vars_vctr
# if (method == "glm") {
# orig_glm <- glb_models_lst[[paste0(mdl_id, ".", model_method)]]$finalModel
# orig_glm <- glb_models_lst[["All.X.glm"]]$finalModel; print(summary(orig_glm))
# orig_glm <- glb_models_lst[["RFE.X.glm"]]$finalModel; print(summary(orig_glm))
# require(car)
# vif_orig_glm <- vif(orig_glm); print(vif_orig_glm)
# # if vif errors out with "there are aliased coefficients in the model"
# alias_orig_glm <- alias(orig_glm); alias_complete_orig_glm <- (alias_orig_glm$Complete > 0); alias_complete_orig_glm <- alias_complete_orig_glm[rowSums(alias_complete_orig_glm) > 0, colSums(alias_complete_orig_glm) > 0]; print(alias_complete_orig_glm)
# print(vif_orig_glm[!is.na(vif_orig_glm) & (vif_orig_glm == Inf)])
# print(which.max(vif_orig_glm))
# print(sort(vif_orig_glm[vif_orig_glm >= 1.0e+03], decreasing=TRUE))
# glbObsFit[c(1143, 3637, 3953, 4105), c("UniqueID", "Popular", "H.P.quandary", "Headline")]
# glb_feats_df[glb_feats_df$id %in% grep("[HSA]\\.chrs.n.log", glb_feats_df$id, value=TRUE) | glb_feats_df$cor.high.X %in% grep("[HSA]\\.chrs.n.log", glb_feats_df$id, value=TRUE), ]
# all.equal(glbObsAll$S.chrs.uppr.n.log, glbObsAll$A.chrs.uppr.n.log)
# cor(glbObsAll$S.T.herald, glbObsAll$S.T.tribun)
# mydspObs(Abstract.contains="[Dd]iar", cols=("Abstract"), all=TRUE)
# subset(glb_feats_df, cor.y.abs <= glb_feats_df[glb_feats_df$id == ".rnorm", "cor.y.abs"])
# corxx_mtrx <- cor(data.matrix(glbObsAll[, setdiff(names(glbObsAll), myfind_chr_cols_df(glbObsAll))]), use="pairwise.complete.obs"); abs_corxx_mtrx <- abs(corxx_mtrx); diag(abs_corxx_mtrx) <- 0
# which.max(abs_corxx_mtrx["S.T.tribun", ])
# abs_corxx_mtrx["A.npnct08.log", "S.npnct08.log"]
# step_glm <- step(orig_glm)
# }
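# Hedged sketch of the VIF diagnostic described in the comments above, using
# toy data (mtcars) instead of glbObsFit; requires the car package:
if (requireNamespace("car", quietly = TRUE)) {
    toy_glm <- glm(I(mpg > 20) ~ disp + hp + wt, data = mtcars, family = binomial)
    print(car::vif(toy_glm)) # values >> 10 flag problematic collinearity
}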
# Since caret does not optimize rpart well
# if (method == "rpart")
# ret_lst <- myfit_mdl(mdl_id=paste0(mdl_id_pfx, ".cp.0"), model_method=method,
# indep_vars_vctr=indep_vars_vctr,
# model_type=glb_model_type,
# rsp_var=glb_rsp_var,
# fit_df=glbObsFit, OOB_df=glbObsOOB,
# n_cv_folds=0, tune_models_df=data.frame(parameter="cp", min=0.0, max=0.0, by=0.1))
# User specified
# Ensure at least 2 vars in each regression; else varImp crashes
# sav_models_lst <- glb_models_lst; sav_models_df <- glb_models_df; sav_featsimp_df <- glb_featsimp_df; all.equal(sav_featsimp_df, glb_featsimp_df)
# glb_models_lst <- sav_models_lst; glb_models_df <- sav_models_df; glm_featsimp_df <- sav_featsimp_df
# easier to exclude features
# require(gdata) # needed for trim
# mdl_id <- "";
# indep_vars_vctr <- head(subset(glb_models_df, grepl("All\\.X\\.", mdl_id), select=feats)
# , 1)[, "feats"]
# indep_vars_vctr <- trim(unlist(strsplit(indep_vars_vctr, "[,]")))
# indep_vars_vctr <- setdiff(indep_vars_vctr, ".rnorm")
# easier to include features
#stop("here"); sav_models_df <- glb_models_df; glb_models_df <- sav_models_df
# !_sp
# mdl_id <- "csm"; indep_vars_vctr <- c(NULL
# ,"prdline.my.fctr", "prdline.my.fctr:.clusterid.fctr"
# ,"prdline.my.fctr*biddable"
# #,"prdline.my.fctr*startprice.log"
# #,"prdline.my.fctr*startprice.diff"
# ,"prdline.my.fctr*condition.fctr"
# ,"prdline.my.fctr*D.terms.post.stop.n"
# #,"prdline.my.fctr*D.terms.post.stem.n"
# ,"prdline.my.fctr*cellular.fctr"
# # ,"<feat1>:<feat2>"
# )
# for (method in glbMdlMethods) {
# ret_lst <- myfit_mdl(mdl_id=mdl_id, model_method=method,
# indep_vars_vctr=indep_vars_vctr,
# model_type=glb_model_type,
# rsp_var=glb_rsp_var,
# fit_df=glbObsFit, OOB_df=glbObsOOB,
# n_cv_folds=glb_rcv_n_folds, tune_models_df=glbMdlTuneParams)
# csm_mdl_id <- paste0(mdl_id, ".", method)
# csm_featsimp_df <- myget_feats_importance(glb_models_lst[[paste0(mdl_id, ".",
# method)]]); print(head(csm_featsimp_df))
# }
###
# Ntv.1.lm <- lm(reformulate(indep_vars_vctr, glb_rsp_var), glbObsTrn); print(summary(Ntv.1.lm))
#glb_models_df[, "max.Accuracy.OOB", FALSE]
#varImp(glb_models_lst[["Low.cor.X.glm"]])
#orderBy(~ -Overall, varImp(glb_models_lst[["All.X.2.glm"]])$imp)
#orderBy(~ -Overall, varImp(glb_models_lst[["All.X.3.glm"]])$imp)
#glb_feats_df[grepl("npnct28", glb_feats_df$id), ]
# User specified bivariate models
# indep_vars_vctr_lst <- list()
# for (feat in setdiff(names(glbObsFit),
# union(glb_rsp_var, glbFeatsExclude)))
# indep_vars_vctr_lst[[feat]] <- feat
# User specified combinatorial models
# indep_vars_vctr_lst <- list()
# combn_mtrx <- combn(c("<feat1_name>", "<feat2_name>", "<featn_name>"),
# <num_feats_to_choose>)
# for (combn_ix in 1:ncol(combn_mtrx))
# #print(combn_mtrx[, combn_ix])
# indep_vars_vctr_lst[[combn_ix]] <- combn_mtrx[, combn_ix]
# template for myfit_mdl
# rf is hard-coded in caret to recognize only Accuracy / Kappa evaluation metrics
# only for OOB in trainControl ?
# ret_lst <- myfit_mdl_fn(mdl_id=paste0(mdl_id_pfx, ""), model_method=method,
# indep_vars_vctr=indep_vars_vctr,
# rsp_var=glb_rsp_var,
# fit_df=glbObsFit, OOB_df=glbObsOOB,
# n_cv_folds=glb_rcv_n_folds, tune_models_df=glbMdlTuneParams,
# model_loss_mtrx=glbMdlMetric_terms,
# model_summaryFunction=glbMdlMetricSummaryFn,
# model_metric=glbMdlMetricSummary,
# model_metric_maximize=glbMdlMetricMaximize)
# Simplify a model
# fit_df <- glbObsFit; glb_mdl <- step(<complex>_mdl)
# Non-caret models
# rpart_area_mdl <- rpart(reformulate("Area", response=glb_rsp_var),
# data=glbObsFit, #method="class",
# control=rpart.control(cp=0.12),
# parms=list(loss=glbMdlMetric_terms))
# print("rpart_sel_wlm_mdl"); prp(rpart_sel_wlm_mdl)
#
print(glb_models_df)
## id
## MFO###myMFO_classfr MFO###myMFO_classfr
## Random###myrandom_classfr Random###myrandom_classfr
## Max.cor.Y.rcv.1X1###glmnet Max.cor.Y.rcv.1X1###glmnet
## Max.cor.Y##rcv#rpart Max.cor.Y##rcv#rpart
## Interact.High.cor.Y##rcv#glmnet Interact.High.cor.Y##rcv#glmnet
## Low.cor.X##rcv#glmnet Low.cor.X##rcv#glmnet
## All.X##rcv#glmnet All.X##rcv#glmnet
## Final##rcv#glmnet Final##rcv#glmnet
## Final##rcv#glm Final##rcv#glm
## feats
## MFO###myMFO_classfr .rnorm
## Random###myrandom_classfr .rnorm
## Max.cor.Y.rcv.1X1###glmnet P.cor.cut.fctr,P.cosSml.cut.fctr
## Max.cor.Y##rcv#rpart P.cor.cut.fctr,P.cosSml.cut.fctr
## Interact.High.cor.Y##rcv#glmnet P.cor.cut.fctr,P.cosSml.cut.fctr,P.cor.cut.fctr:P.cor.cut.fctr,P.cor.cut.fctr:P.mnkSml.3.scld,P.cor.cut.fctr:P.mnkSml.3.scld.cut.fctr,P.cor.cut.fctr:P.mnkSml.2.scld.cut.fctr
## Low.cor.X##rcv#glmnet P.cor.cut.fctr,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.cosSml,.rnorm
## All.X##rcv#glmnet P.cor.cut.fctr,P.cor,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.mnkSml.2.scld,P.mnkSml.1.scld,P.cosSml,P.mnkSml.3.scld.cut.fctr,P.mnkSml.2.scld.cut.fctr,P.mnkSml.1.scld.cut.fctr,.rnorm
## Final##rcv#glmnet P.cor.cut.fctr,P.cor,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.mnkSml.2.scld,P.mnkSml.1.scld,P.cosSml,P.mnkSml.3.scld.cut.fctr,P.mnkSml.2.scld.cut.fctr,P.mnkSml.1.scld.cut.fctr,.rnorm
## Final##rcv#glm P.cor.cut.fctr,P.cor,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.mnkSml.2.scld,P.mnkSml.1.scld,P.cosSml,P.mnkSml.3.scld.cut.fctr,P.mnkSml.2.scld.cut.fctr,P.mnkSml.1.scld.cut.fctr,.rnorm
## max.nTuningRuns min.elapsedtime.everything
## MFO###myMFO_classfr 0 0.455
## Random###myrandom_classfr 0 0.305
## Max.cor.Y.rcv.1X1###glmnet 0 1.294
## Max.cor.Y##rcv#rpart 1 3.201
## Interact.High.cor.Y##rcv#glmnet 25 50.034
## Low.cor.X##rcv#glmnet 25 23.222
## All.X##rcv#glmnet 25 42.034
## Final##rcv#glmnet 25 42.097
## Final##rcv#glm 1 3.516
## min.elapsedtime.final max.AUCpROC.fit
## MFO###myMFO_classfr 0.004 0.5000000
## Random###myrandom_classfr 0.004 0.4988761
## Max.cor.Y.rcv.1X1###glmnet 0.415 0.5000000
## Max.cor.Y##rcv#rpart 0.051 0.5000000
## Interact.High.cor.Y##rcv#glmnet 1.394 0.5072711
## Low.cor.X##rcv#glmnet 0.515 0.5000000
## All.X##rcv#glmnet 1.007 0.5110832
## Final##rcv#glmnet 1.009 0.5110832
## Final##rcv#glm 0.257 0.5113383
## max.Sens.fit max.Spec.fit max.AUCROCR.fit
## MFO###myMFO_classfr 1.0000000 0.00000000 0.5000000
## Random###myrandom_classfr 0.8001407 0.19761160 0.4977388
## Max.cor.Y.rcv.1X1###glmnet 1.0000000 0.00000000 0.6853218
## Max.cor.Y##rcv#rpart 1.0000000 0.00000000 0.5000000
## Interact.High.cor.Y##rcv#glmnet 0.9960605 0.01848166 0.6898095
## Low.cor.X##rcv#glmnet 1.0000000 0.00000000 0.6847781
## All.X##rcv#glmnet 0.9943018 0.02786466 0.7013573
## Final##rcv#glmnet 0.9943018 0.02786466 0.7013573
## Final##rcv#glm 0.9931059 0.02957066 0.7019435
## opt.prob.threshold.fit max.f.score.fit
## MFO###myMFO_classfr 0.1 0.3310273
## Random###myrandom_classfr 0.1 0.3310273
## Max.cor.Y.rcv.1X1###glmnet 0.2 0.4164392
## Max.cor.Y##rcv#rpart 0.1 0.3310273
## Interact.High.cor.Y##rcv#glmnet 0.2 0.4164392
## Low.cor.X##rcv#glmnet 0.2 0.4164392
## All.X##rcv#glmnet 0.2 0.4243526
## Final##rcv#glmnet 0.2 0.4243526
## Final##rcv#glm 0.2 0.4249956
## max.Accuracy.fit max.AccuracyLower.fit
## MFO###myMFO_classfr 0.1983420 0.1924945
## Random###myrandom_classfr 0.1983420 0.1924945
## Max.cor.Y.rcv.1X1###glmnet 0.5900068 0.5827252
## Max.cor.Y##rcv#rpart 0.8016580 0.1924945
## Interact.High.cor.Y##rcv#glmnet 0.8020339 0.5827252
## Low.cor.X##rcv#glmnet 0.8016580 0.5827252
## All.X##rcv#glmnet 0.8020528 0.6179676
## Final##rcv#glmnet 0.8020528 0.6179676
## Final##rcv#glm 0.8018272 0.6223262
## max.AccuracyUpper.fit max.Kappa.fit
## MFO###myMFO_classfr 0.2042885 0.00000000
## Random###myrandom_classfr 0.2042885 0.00000000
## Max.cor.Y.rcv.1X1###glmnet 0.5972588 0.18417719
## Max.cor.Y##rcv#rpart 0.2042885 0.00000000
## Interact.High.cor.Y##rcv#glmnet 0.5972588 0.02626314
## Low.cor.X##rcv#glmnet 0.5972588 0.00000000
## All.X##rcv#glmnet 0.6322733 0.03358448
## Final##rcv#glmnet 0.6322733 0.03358448
## Final##rcv#glm 0.6365982 0.03797967
## max.AUCpROC.OOB max.Sens.OOB max.Spec.OOB
## MFO###myMFO_classfr 0.5000000 1.0000000 0.00000000
## Random###myrandom_classfr 0.5027819 0.8024344 0.20312945
## Max.cor.Y.rcv.1X1###glmnet 0.5000000 1.0000000 0.00000000
## Max.cor.Y##rcv#rpart 0.5000000 1.0000000 0.00000000
## Interact.High.cor.Y##rcv#glmnet 0.5056058 0.9958489 0.01536273
## Low.cor.X##rcv#glmnet 0.5000000 1.0000000 0.00000000
## All.X##rcv#glmnet 0.5158859 0.9953564 0.03641536
## Final##rcv#glmnet 0.5158859 0.9953564 0.03641536
## Final##rcv#glm 0.5176724 0.9938085 0.04153627
## max.AUCROCR.OOB opt.prob.threshold.OOB
## MFO###myMFO_classfr 0.5000000 0.1
## Random###myrandom_classfr 0.5012493 0.1
## Max.cor.Y.rcv.1X1###glmnet 0.6809790 0.2
## Max.cor.Y##rcv#rpart 0.5000000 0.1
## Interact.High.cor.Y##rcv#glmnet 0.6835589 0.2
## Low.cor.X##rcv#glmnet 0.6821994 0.2
## All.X##rcv#glmnet 0.7003196 0.2
## Final##rcv#glmnet 0.7003196 0.2
## Final##rcv#glm 0.7008478 0.2
## max.f.score.OOB max.Accuracy.OOB
## MFO###myMFO_classfr 0.3309325 0.1982739
## Random###myrandom_classfr 0.3309325 0.1982739
## Max.cor.Y.rcv.1X1###glmnet 0.4165395 0.5900835
## Max.cor.Y##rcv#rpart 0.3309325 0.1982739
## Interact.High.cor.Y##rcv#glmnet 0.4165395 0.5900835
## Low.cor.X##rcv#glmnet 0.4165395 0.5900835
## All.X##rcv#glmnet 0.4209793 0.6251128
## Final##rcv#glmnet 0.4209793 0.6251128
## Final##rcv#glm 0.4216676 0.6314305
## max.AccuracyLower.OOB
## MFO###myMFO_classfr 0.1924266
## Random###myrandom_classfr 0.1924266
## Max.cor.Y.rcv.1X1###glmnet 0.5828013
## Max.cor.Y##rcv#rpart 0.1924266
## Interact.High.cor.Y##rcv#glmnet 0.5828013
## Low.cor.X##rcv#glmnet 0.5828013
## All.X##rcv#glmnet 0.6179385
## Final##rcv#glmnet 0.6179385
## Final##rcv#glm 0.6242799
## max.AccuracyUpper.OOB max.Kappa.OOB
## MFO###myMFO_classfr 0.2042204 0.0000000
## Random###myrandom_classfr 0.2042204 0.0000000
## Max.cor.Y.rcv.1X1###glmnet 0.5973362 0.1843870
## Max.cor.Y##rcv#rpart 0.2042204 0.0000000
## Interact.High.cor.Y##rcv#glmnet 0.5973362 0.1843870
## Low.cor.X##rcv#glmnet 0.5973362 0.1843870
## All.X##rcv#glmnet 0.6322460 0.2012297
## Final##rcv#glmnet 0.6322460 0.2012297
## Final##rcv#glm 0.6385380 0.2043030
## max.AccuracySD.fit max.KappaSD.fit
## MFO###myMFO_classfr NA NA
## Random###myrandom_classfr NA NA
## Max.cor.Y.rcv.1X1###glmnet NA NA
## Max.cor.Y##rcv#rpart 6.660446e-05 0.000000000
## Interact.High.cor.Y##rcv#glmnet 7.680048e-04 0.005680814
## Low.cor.X##rcv#glmnet 6.985535e-05 0.000000000
## All.X##rcv#glmnet 9.535491e-04 0.013003280
## Final##rcv#glmnet 9.535491e-04 0.013003280
## Final##rcv#glm 1.338795e-03 0.009045356
rm(ret_lst)
fit.models_1_chunk_df <-
myadd_chunk(fit.models_1_chunk_df, "fit.models_1_end", major.inc = FALSE,
label.minor = "teardown")
## label step_major step_minor label_minor bgn end
## 7 fit.models_1_preProc 1 6 preProc 397.801 397.869
## 8 fit.models_1_end 1 7 teardown 397.869 NA
## elapsed
## 7 0.068
## 8 NA
glb_chunks_df <- myadd_chunk(glb_chunks_df, "fit.models", major.inc = FALSE)
## label step_major step_minor label_minor bgn end elapsed
## 17 fit.models 8 1 1 229.924 397.877 167.954
## 18 fit.models 8 2 2 397.878 NA NA
fit.models_2_chunk_df <-
myadd_chunk(NULL, "fit.models_2_bgn", label.minor = "setup")
## label step_major step_minor label_minor bgn end elapsed
## 1 fit.models_2_bgn 1 0 setup 408.646 NA NA
plt_models_df <- glb_models_df[, -grep("SD|Upper|Lower", names(glb_models_df))]
for (var in grep("^min.", names(plt_models_df), value=TRUE)) {
plt_models_df[, sub("min.", "inv.", var)] <-
#ifelse(all(is.na(tmp <- plt_models_df[, var])), NA, 1.0 / tmp)
1.0 / plt_models_df[, var]
plt_models_df <- plt_models_df[ , -grep(var, names(plt_models_df))]
}
print(plt_models_df)
## id
## MFO###myMFO_classfr MFO###myMFO_classfr
## Random###myrandom_classfr Random###myrandom_classfr
## Max.cor.Y.rcv.1X1###glmnet Max.cor.Y.rcv.1X1###glmnet
## Max.cor.Y##rcv#rpart Max.cor.Y##rcv#rpart
## Interact.High.cor.Y##rcv#glmnet Interact.High.cor.Y##rcv#glmnet
## Low.cor.X##rcv#glmnet Low.cor.X##rcv#glmnet
## All.X##rcv#glmnet All.X##rcv#glmnet
## Final##rcv#glmnet Final##rcv#glmnet
## Final##rcv#glm Final##rcv#glm
## feats
## MFO###myMFO_classfr .rnorm
## Random###myrandom_classfr .rnorm
## Max.cor.Y.rcv.1X1###glmnet P.cor.cut.fctr,P.cosSml.cut.fctr
## Max.cor.Y##rcv#rpart P.cor.cut.fctr,P.cosSml.cut.fctr
## Interact.High.cor.Y##rcv#glmnet P.cor.cut.fctr,P.cosSml.cut.fctr,P.cor.cut.fctr:P.cor.cut.fctr,P.cor.cut.fctr:P.mnkSml.3.scld,P.cor.cut.fctr:P.mnkSml.3.scld.cut.fctr,P.cor.cut.fctr:P.mnkSml.2.scld.cut.fctr
## Low.cor.X##rcv#glmnet P.cor.cut.fctr,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.cosSml,.rnorm
## All.X##rcv#glmnet P.cor.cut.fctr,P.cor,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.mnkSml.2.scld,P.mnkSml.1.scld,P.cosSml,P.mnkSml.3.scld.cut.fctr,P.mnkSml.2.scld.cut.fctr,P.mnkSml.1.scld.cut.fctr,.rnorm
## Final##rcv#glmnet P.cor.cut.fctr,P.cor,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.mnkSml.2.scld,P.mnkSml.1.scld,P.cosSml,P.mnkSml.3.scld.cut.fctr,P.mnkSml.2.scld.cut.fctr,P.mnkSml.1.scld.cut.fctr,.rnorm
## Final##rcv#glm P.cor.cut.fctr,P.cor,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.mnkSml.2.scld,P.mnkSml.1.scld,P.cosSml,P.mnkSml.3.scld.cut.fctr,P.mnkSml.2.scld.cut.fctr,P.mnkSml.1.scld.cut.fctr,.rnorm
## max.nTuningRuns max.AUCpROC.fit
## MFO###myMFO_classfr 0 0.5000000
## Random###myrandom_classfr 0 0.4988761
## Max.cor.Y.rcv.1X1###glmnet 0 0.5000000
## Max.cor.Y##rcv#rpart 1 0.5000000
## Interact.High.cor.Y##rcv#glmnet 25 0.5072711
## Low.cor.X##rcv#glmnet 25 0.5000000
## All.X##rcv#glmnet 25 0.5110832
## Final##rcv#glmnet 25 0.5110832
## Final##rcv#glm 1 0.5113383
## max.Sens.fit max.Spec.fit max.AUCROCR.fit
## MFO###myMFO_classfr 1.0000000 0.00000000 0.5000000
## Random###myrandom_classfr 0.8001407 0.19761160 0.4977388
## Max.cor.Y.rcv.1X1###glmnet 1.0000000 0.00000000 0.6853218
## Max.cor.Y##rcv#rpart 1.0000000 0.00000000 0.5000000
## Interact.High.cor.Y##rcv#glmnet 0.9960605 0.01848166 0.6898095
## Low.cor.X##rcv#glmnet 1.0000000 0.00000000 0.6847781
## All.X##rcv#glmnet 0.9943018 0.02786466 0.7013573
## Final##rcv#glmnet 0.9943018 0.02786466 0.7013573
## Final##rcv#glm 0.9931059 0.02957066 0.7019435
## opt.prob.threshold.fit max.f.score.fit
## MFO###myMFO_classfr 0.1 0.3310273
## Random###myrandom_classfr 0.1 0.3310273
## Max.cor.Y.rcv.1X1###glmnet 0.2 0.4164392
## Max.cor.Y##rcv#rpart 0.1 0.3310273
## Interact.High.cor.Y##rcv#glmnet 0.2 0.4164392
## Low.cor.X##rcv#glmnet 0.2 0.4164392
## All.X##rcv#glmnet 0.2 0.4243526
## Final##rcv#glmnet 0.2 0.4243526
## Final##rcv#glm 0.2 0.4249956
## max.Accuracy.fit max.Kappa.fit
## MFO###myMFO_classfr 0.1983420 0.00000000
## Random###myrandom_classfr 0.1983420 0.00000000
## Max.cor.Y.rcv.1X1###glmnet 0.5900068 0.18417719
## Max.cor.Y##rcv#rpart 0.8016580 0.00000000
## Interact.High.cor.Y##rcv#glmnet 0.8020339 0.02626314
## Low.cor.X##rcv#glmnet 0.8016580 0.00000000
## All.X##rcv#glmnet 0.8020528 0.03358448
## Final##rcv#glmnet 0.8020528 0.03358448
## Final##rcv#glm 0.8018272 0.03797967
## max.AUCpROC.OOB max.Sens.OOB max.Spec.OOB
## MFO###myMFO_classfr 0.5000000 1.0000000 0.00000000
## Random###myrandom_classfr 0.5027819 0.8024344 0.20312945
## Max.cor.Y.rcv.1X1###glmnet 0.5000000 1.0000000 0.00000000
## Max.cor.Y##rcv#rpart 0.5000000 1.0000000 0.00000000
## Interact.High.cor.Y##rcv#glmnet 0.5056058 0.9958489 0.01536273
## Low.cor.X##rcv#glmnet 0.5000000 1.0000000 0.00000000
## All.X##rcv#glmnet 0.5158859 0.9953564 0.03641536
## Final##rcv#glmnet 0.5158859 0.9953564 0.03641536
## Final##rcv#glm 0.5176724 0.9938085 0.04153627
## max.AUCROCR.OOB opt.prob.threshold.OOB
## MFO###myMFO_classfr 0.5000000 0.1
## Random###myrandom_classfr 0.5012493 0.1
## Max.cor.Y.rcv.1X1###glmnet 0.6809790 0.2
## Max.cor.Y##rcv#rpart 0.5000000 0.1
## Interact.High.cor.Y##rcv#glmnet 0.6835589 0.2
## Low.cor.X##rcv#glmnet 0.6821994 0.2
## All.X##rcv#glmnet 0.7003196 0.2
## Final##rcv#glmnet 0.7003196 0.2
## Final##rcv#glm 0.7008478 0.2
## max.f.score.OOB max.Accuracy.OOB
## MFO###myMFO_classfr 0.3309325 0.1982739
## Random###myrandom_classfr 0.3309325 0.1982739
## Max.cor.Y.rcv.1X1###glmnet 0.4165395 0.5900835
## Max.cor.Y##rcv#rpart 0.3309325 0.1982739
## Interact.High.cor.Y##rcv#glmnet 0.4165395 0.5900835
## Low.cor.X##rcv#glmnet 0.4165395 0.5900835
## All.X##rcv#glmnet 0.4209793 0.6251128
## Final##rcv#glmnet 0.4209793 0.6251128
## Final##rcv#glm 0.4216676 0.6314305
## max.Kappa.OOB inv.elapsedtime.everything
## MFO###myMFO_classfr 0.0000000 2.19780220
## Random###myrandom_classfr 0.0000000 3.27868852
## Max.cor.Y.rcv.1X1###glmnet 0.1843870 0.77279753
## Max.cor.Y##rcv#rpart 0.0000000 0.31240237
## Interact.High.cor.Y##rcv#glmnet 0.1843870 0.01998641
## Low.cor.X##rcv#glmnet 0.1843870 0.04306261
## All.X##rcv#glmnet 0.2012297 0.02379027
## Final##rcv#glmnet 0.2012297 0.02375466
## Final##rcv#glm 0.2043030 0.28441411
## inv.elapsedtime.final
## MFO###myMFO_classfr 250.0000000
## Random###myrandom_classfr 250.0000000
## Max.cor.Y.rcv.1X1###glmnet 2.4096386
## Max.cor.Y##rcv#rpart 19.6078431
## Interact.High.cor.Y##rcv#glmnet 0.7173601
## Low.cor.X##rcv#glmnet 1.9417476
## All.X##rcv#glmnet 0.9930487
## Final##rcv#glmnet 0.9910803
## Final##rcv#glm 3.8910506
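# Note: the inv.elapsedtime.* columns are reciprocals of the min.elapsedtime.*
# columns so that "bigger is better" holds for every plotted metric, e.g. for
# All.X##rcv#glmnet:
1.0 / 42.034 # ~0.02379, matching inv.elapsedtime.everything above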
# print(myplot_radar(radar_inp_df=plt_models_df))
# print(myplot_radar(radar_inp_df=subset(plt_models_df,
# !(mdl_id %in% grep("random|MFO", plt_models_df$id, value=TRUE)))))
# Compute CI for <metric>SD
glb_models_df <- mutate(glb_models_df,
max.df = ifelse(max.nTuningRuns > 1, max.nTuningRuns - 1, NA),
min.sd2ci.scaler = ifelse(is.na(max.df), NA, qt(0.975, max.df)))
for (var in grep("SD", names(glb_models_df), value=TRUE)) {
# Does the CI already exist ?
var_components <- unlist(strsplit(var, "SD"))
varActul <- paste0(var_components[1], var_components[2])
varUpper <- paste0(var_components[1], "Upper", var_components[2])
varLower <- paste0(var_components[1], "Lower", var_components[2])
if (varUpper %in% names(glb_models_df)) {
warning(varUpper, " already exists in glb_models_df")
# Assuming Lower also exists
next
}
print(sprintf("var:%s", var))
# CI is dependent on sample size in t distribution; df=n-1
glb_models_df[, varUpper] <- glb_models_df[, varActul] +
glb_models_df[, "min.sd2ci.scaler"] * glb_models_df[, var]
glb_models_df[, varLower] <- glb_models_df[, varActul] -
glb_models_df[, "min.sd2ci.scaler"] * glb_models_df[, var]
}
## Warning: max.AccuracyUpper.fit already exists in glb_models_df
## [1] "var:max.KappaSD.fit"
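# Worked instance (illustration only) of the CI construction above: with
# max.nTuningRuns = 25, df = 24 and CI = metric +/- qt(0.975, 24) * metricSD.
# Using the All.X##rcv#glmnet fit accuracy and SD from the table above:
0.8020528 + c(-1, 1) * qt(0.975, df = 24) * 0.0009535491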
# Plot metrics with CI
plt_models_df <- glb_models_df[, "id", FALSE]
pltCI_models_df <- glb_models_df[, "id", FALSE]
for (var in grep("Upper", names(glb_models_df), value=TRUE)) {
var_components <- unlist(strsplit(var, "Upper"))
col_name <- unlist(paste(var_components, collapse=""))
plt_models_df[, col_name] <- glb_models_df[, col_name]
for (name in paste0(var_components[1], c("Upper", "Lower"), var_components[2]))
pltCI_models_df[, name] <- glb_models_df[, name]
}
build_statsCI_data <- function(plt_models_df) {
mltd_models_df <- melt(plt_models_df, id.vars="id")
mltd_models_df$data <- sapply(1:nrow(mltd_models_df),
function(row_ix) tail(unlist(strsplit(as.character(
mltd_models_df[row_ix, "variable"]), "[.]")), 1))
mltd_models_df$label <- sapply(1:nrow(mltd_models_df),
function(row_ix) head(unlist(strsplit(as.character(
mltd_models_df[row_ix, "variable"]),
paste0(".", mltd_models_df[row_ix, "data"]))), 1))
#print(mltd_models_df)
return(mltd_models_df)
}
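# Minimal illustration of the melt() reshaping build_statsCI_data() relies on
# (assuming melt() here is reshape2's, as the id.vars usage suggests):
toy_wide <- data.frame(id = c("m1", "m2"), max.Accuracy.OOB = c(0.59, 0.63))
melt(toy_wide, id.vars = "id") # one row per (id, variable, value) triple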
mltd_models_df <- build_statsCI_data(plt_models_df)
mltdCI_models_df <- melt(pltCI_models_df, id.vars="id")
for (row_ix in 1:nrow(mltdCI_models_df)) {
for (type in c("Upper", "Lower")) {
if (length(var_components <- unlist(strsplit(
as.character(mltdCI_models_df[row_ix, "variable"]), type))) > 1) {
#print(sprintf("row_ix:%d; type:%s; ", row_ix, type))
mltdCI_models_df[row_ix, "label"] <- var_components[1]
mltdCI_models_df[row_ix, "data"] <-
unlist(strsplit(var_components[2], "[.]"))[2]
mltdCI_models_df[row_ix, "type"] <- type
break
}
}
}
wideCI_models_df <- reshape(subset(mltdCI_models_df, select=-variable),
timevar="type",
idvar=setdiff(names(mltdCI_models_df), c("type", "value", "variable")),
direction="wide")
#print(wideCI_models_df)
mrgdCI_models_df <- merge(wideCI_models_df, mltd_models_df, all.x=TRUE)
#print(mrgdCI_models_df)
# Merge stats back in if CIs don't exist
goback_vars <- c()
for (var in unique(mltd_models_df$label)) {
for (type in unique(mltd_models_df$data)) {
var_type <- paste0(var, ".", type)
# if this data is already present, next
if (var_type %in% unique(paste(mltd_models_df$label, mltd_models_df$data,
sep=".")))
next
#print(sprintf("var_type:%s", var_type))
goback_vars <- c(goback_vars, var_type)
}
}
if (length(goback_vars) > 0) {
mltd_goback_df <- build_statsCI_data(glb_models_df[, c("id", goback_vars)])
mltd_models_df <- rbind(mltd_models_df, mltd_goback_df)
}
# mltd_models_df <- merge(mltd_models_df, glb_models_df[, c("id", "model_method")],
# all.x=TRUE)
png(paste0(glbOut$pfx, "models_bar.png"), width=480*3, height=480*2)
#print(gp <- myplot_bar(mltd_models_df, "id", "value", colorcol_name="model_method") +
print(gp <- myplot_bar(df=mltd_models_df, xcol_name="id", ycol_names="value") +
geom_errorbar(data=mrgdCI_models_df,
                  mapping=aes(x=id, ymax=value.Upper, ymin=value.Lower), width=0.5) +
facet_grid(label ~ data, scales="free") +
theme(axis.text.x = element_text(angle = 90,vjust = 0.5)))
## Warning: Removed 5 rows containing missing values (geom_errorbar).
dev.off()
## quartz_off_screen
## 2
print(gp)
## Warning: Removed 5 rows containing missing values (geom_errorbar).
dsp_models_cols <- c("id",
glbMdlMetricsEval[glbMdlMetricsEval %in% names(glb_models_df)],
grep("opt.", names(glb_models_df), fixed = TRUE, value = TRUE))
# if (glb_is_classification && glb_is_binomial)
# dsp_models_cols <- c(dsp_models_cols, "opt.prob.threshold.OOB")
print(dsp_models_df <- orderBy(get_model_sel_frmla(), glb_models_df)[, dsp_models_cols])
## id max.Accuracy.OOB max.AUCROCR.OOB
## 9 Final##rcv#glm 0.6314305 0.7008478
## 7 All.X##rcv#glmnet 0.6251128 0.7003196
## 8 Final##rcv#glmnet 0.6251128 0.7003196
## 5 Interact.High.cor.Y##rcv#glmnet 0.5900835 0.6835589
## 6 Low.cor.X##rcv#glmnet 0.5900835 0.6821994
## 3 Max.cor.Y.rcv.1X1###glmnet 0.5900835 0.6809790
## 2 Random###myrandom_classfr 0.1982739 0.5012493
## 4 Max.cor.Y##rcv#rpart 0.1982739 0.5000000
## 1 MFO###myMFO_classfr 0.1982739 0.5000000
## max.AUCpROC.OOB max.Accuracy.fit opt.prob.threshold.fit
## 9 0.5176724 0.8018272 0.2
## 7 0.5158859 0.8020528 0.2
## 8 0.5158859 0.8020528 0.2
## 5 0.5056058 0.8020339 0.2
## 6 0.5000000 0.8016580 0.2
## 3 0.5000000 0.5900068 0.2
## 2 0.5027819 0.1983420 0.1
## 4 0.5000000 0.8016580 0.1
## 1 0.5000000 0.1983420 0.1
## opt.prob.threshold.OOB
## 9 0.2
## 7 0.2
## 8 0.2
## 5 0.2
## 6 0.2
## 3 0.2
## 2 0.1
## 4 0.1
## 1 0.1
# print(myplot_radar(radar_inp_df = dsp_models_df))
print("Metrics used for model selection:"); print(get_model_sel_frmla())
## [1] "Metrics used for model selection:"
## ~-max.Accuracy.OOB - max.AUCROCR.OOB - max.AUCpROC.OOB - max.Accuracy.fit -
## opt.prob.threshold.OOB
## <environment: 0x7f99a044bd80>
print(sprintf("Best model id: %s", dsp_models_df[1, "id"]))
## [1] "Best model id: Final##rcv#glm"
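# Sketch of why the formula above ranks models as shown: orderBy() (assumed
# to be doBy's) sorts ascending term by term, so the leading minus signs
# produce a descending sort, i.e. highest max.Accuracy.OOB first. Toy data:
orderBy(~ -max.Accuracy.OOB,
        data.frame(id = c("a", "b"), max.Accuracy.OOB = c(0.59, 0.63)))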
glb_get_predictions <- function(df, mdl_id, rsp_var, prob_threshold_def=NULL, verbose=FALSE) {
mdl <- glb_models_lst[[mdl_id]]
clmnNames <- mygetPredictIds(rsp_var, mdl_id)
predct_var_name <- clmnNames$value
predct_prob_var_name <- clmnNames$prob
predct_accurate_var_name <- clmnNames$is.acc
predct_error_var_name <- clmnNames$err
predct_erabs_var_name <- clmnNames$err.abs
if (glb_is_regression) {
df[, predct_var_name] <- predict(mdl, newdata=df, type="raw")
if (verbose) print(myplot_scatter(df, glb_rsp_var, predct_var_name) +
facet_wrap(reformulate(glbFeatsCategory), scales = "free") +
stat_smooth(method="glm"))
df[, predct_error_var_name] <- df[, predct_var_name] - df[, glb_rsp_var]
if (verbose) print(myplot_scatter(df, predct_var_name, predct_error_var_name) +
#facet_wrap(reformulate(glbFeatsCategory), scales = "free") +
stat_smooth(method="auto"))
if (verbose) print(myplot_scatter(df, glb_rsp_var, predct_error_var_name) +
#facet_wrap(reformulate(glbFeatsCategory), scales = "free") +
stat_smooth(method="glm"))
df[, predct_erabs_var_name] <- abs(df[, predct_error_var_name])
if (verbose) print(head(orderBy(reformulate(c("-", predct_erabs_var_name)), df)))
df[, predct_accurate_var_name] <- (df[, glb_rsp_var] == df[, predct_var_name])
}
if (glb_is_classification && glb_is_binomial) {
prob_threshold <- glb_models_df[glb_models_df$id == mdl_id,
"opt.prob.threshold.OOB"]
if (is.null(prob_threshold) || is.na(prob_threshold)) {
warning("Using default probability threshold: ", prob_threshold_def)
if (is.null(prob_threshold <- prob_threshold_def))
stop("Default probability threshold is NULL")
}
df[, predct_prob_var_name] <- predict(mdl, newdata = df, type = "prob")[, 2]
df[, predct_var_name] <-
factor(levels(df[, glb_rsp_var])[
(df[, predct_prob_var_name] >=
prob_threshold) * 1 + 1], levels(df[, glb_rsp_var]))
# if (verbose) print(myplot_scatter(df, glb_rsp_var, predct_var_name) +
# facet_wrap(reformulate(glbFeatsCategory), scales = "free") +
# stat_smooth(method="glm"))
df[, predct_error_var_name] <- df[, predct_var_name] != df[, glb_rsp_var]
# if (verbose) print(myplot_scatter(df, predct_var_name, predct_error_var_name) +
# #facet_wrap(reformulate(glbFeatsCategory), scales = "free") +
# stat_smooth(method="auto"))
# if (verbose) print(myplot_scatter(df, glb_rsp_var, predct_error_var_name) +
# #facet_wrap(reformulate(glbFeatsCategory), scales = "free") +
# stat_smooth(method="glm"))
# if prediction is a TP (true +ve), measure distance from 1.0
tp <- which((df[, predct_var_name] == df[, glb_rsp_var]) &
(df[, predct_var_name] == levels(df[, glb_rsp_var])[2]))
df[tp, predct_erabs_var_name] <- abs(1 - df[tp, predct_prob_var_name])
#rowIx <- which.max(df[tp, predct_erabs_var_name]); df[tp, c(glbFeatsId, glb_rsp_var, predct_var_name, predct_prob_var_name, predct_erabs_var_name)][rowIx, ]
# if prediction is a TN (true -ve), measure distance from 0.0
tn <- which((df[, predct_var_name] == df[, glb_rsp_var]) &
(df[, predct_var_name] == levels(df[, glb_rsp_var])[1]))
df[tn, predct_erabs_var_name] <- abs(0 - df[tn, predct_prob_var_name])
#rowIx <- which.max(df[tn, predct_erabs_var_name]); df[tn, c(glbFeatsId, glb_rsp_var, predct_var_name, predct_prob_var_name, predct_erabs_var_name)][rowIx, ]
# if prediction is a FP (false +ve), measure distance from 0.0
fp <- which((df[, predct_var_name] != df[, glb_rsp_var]) &
(df[, predct_var_name] == levels(df[, glb_rsp_var])[2]))
df[fp, predct_erabs_var_name] <- abs(0 - df[fp, predct_prob_var_name])
#rowIx <- which.max(df[fp, predct_erabs_var_name]); df[fp, c(glbFeatsId, glb_rsp_var, predct_var_name, predct_prob_var_name, predct_erabs_var_name)][rowIx, ]
# if prediction is a FN (false -ve), measure distance from 1.0
fn <- which((df[, predct_var_name] != df[, glb_rsp_var]) &
(df[, predct_var_name] == levels(df[, glb_rsp_var])[1]))
df[fn, predct_erabs_var_name] <- abs(1 - df[fn, predct_prob_var_name])
#rowIx <- which.max(df[fn, predct_erabs_var_name]); df[fn, c(glbFeatsId, glb_rsp_var, predct_var_name, predct_prob_var_name, predct_erabs_var_name)][rowIx, ]
if (verbose) print(head(orderBy(reformulate(c("-", predct_erabs_var_name)), df)))
df[, predct_accurate_var_name] <- (df[, glb_rsp_var] == df[, predct_var_name])
}
if (glb_is_classification && !glb_is_binomial) {
df[, predct_var_name] <- predict(mdl, newdata = df, type = "raw")
probCls <- predict(mdl, newdata = df, type = "prob")
df[, predct_prob_var_name] <- NA
for (cls in names(probCls)) {
mask <- (df[, predct_var_name] == cls)
df[mask, predct_prob_var_name] <- probCls[mask, cls]
}
if (verbose) print(myplot_histogram(df, predct_prob_var_name,
fill_col_name = predct_var_name))
if (verbose) print(myplot_histogram(df, predct_prob_var_name,
facet_frmla = paste0("~", glb_rsp_var)))
df[, predct_error_var_name] <- df[, predct_var_name] != df[, glb_rsp_var]
# if prediction is erroneous, record the predicted probability of the actual class
df[, predct_erabs_var_name] <- 0
for (cls in names(probCls)) {
mask <- (df[, glb_rsp_var] == cls) & (df[, predct_error_var_name])
df[mask, predct_erabs_var_name] <- probCls[mask, cls]
}
df[, predct_accurate_var_name] <- (df[, glb_rsp_var] == df[, predct_var_name])
}
return(df)
}
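# The factor construction in the binomial branch above hinges on the indexing
# trick (prob >= threshold) * 1 + 1, which maps FALSE to the first (negative)
# level and TRUE to the second (positive) level. A minimal sketch with
# hypothetical probabilities:
toy_actual <- factor(c(".none", "left_eye_center", ".none"),
                     levels = c(".none", "left_eye_center"))
toy_prob <- c(0.05, 0.85, 0.30)
toy_threshold <- 0.2
# FALSE -> index 1 (".none"), TRUE -> index 2 ("left_eye_center")
toy_pred <- factor(levels(toy_actual)[(toy_prob >= toy_threshold) * 1 + 1],
                   levels(toy_actual))
print(toy_pred)  # .none left_eye_center left_eye_center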
#stop(here"); glb2Sav(); glbObsAll <- savObsAll; glbObsTrn <- savObsTrn; glbObsFit <- savObsFit; glbObsOOB <- savObsOOB; sav_models_df <- glb_models_df; glb_models_df <- sav_models_df; glb_featsimp_df <- sav_featsimp_df
myget_category_stats <- function(obs_df, mdl_id, label) {
require(dplyr)
require(lazyeval)
predct_var_name <- mygetPredictIds(glb_rsp_var, mdl_id)$value
predct_error_var_name <- mygetPredictIds(glb_rsp_var, mdl_id)$err.abs
if (!predct_var_name %in% names(obs_df))
obs_df <- glb_get_predictions(obs_df, mdl_id, glb_rsp_var)
tmp_obs_df <- obs_df[, c(glbFeatsCategory, glb_rsp_var,
predct_var_name, predct_error_var_name)]
# tmp_obs_df <- obs_df %>%
# dplyr::select_(glbFeatsCategory, glb_rsp_var, predct_var_name, predct_error_var_name)
#dplyr::rename(startprice.log10.predict.RFE.X.glmnet.err=error_abs_OOB)
names(tmp_obs_df)[length(names(tmp_obs_df))] <- paste0("err.abs.", label)
ret_ctgry_df <- tmp_obs_df %>%
dplyr::group_by_(glbFeatsCategory) %>%
dplyr::summarise_(#interp(~sum(abs(var)), var=as.name(glb_rsp_var)),
interp(~sum(var), var=as.name(paste0("err.abs.", label))),
interp(~mean(var), var=as.name(paste0("err.abs.", label))),
interp(~n()))
names(ret_ctgry_df) <- c(glbFeatsCategory,
#paste0(glb_rsp_var, ".abs.", label, ".sum"),
paste0("err.abs.", label, ".sum"),
paste0("err.abs.", label, ".mean"),
paste0(".n.", label))
ret_ctgry_df <- dplyr::ungroup(ret_ctgry_df)
#colSums(ret_ctgry_df[, -grep(glbFeatsCategory, names(ret_ctgry_df))])
return(ret_ctgry_df)
}
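# myget_category_stats() builds its summaries with dplyr's standard-evaluation
# verbs plus lazyeval::interp(), so column names held in character variables
# can be spliced into the expressions. A minimal sketch on a toy frame
# (hypothetical columns; the SE verbs match the dplyr of this vintage and are
# deprecated in later releases):
require(dplyr); require(lazyeval)
toy_df <- data.frame(ctgry = c("a", "a", "b"),
                     err.abs.fit = c(0.2, 0.4, 0.1))
errCol <- "err.abs.fit"  # column name held in a variable
toy_stats <- toy_df %>%
    dplyr::group_by_("ctgry") %>%
    dplyr::summarise_(interp(~sum(var), var = as.name(errCol)),
                      interp(~mean(var), var = as.name(errCol)),
                      interp(~n()))
print(toy_stats)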
#print(colSums((ctgry_df <- myget_category_stats(obs_df=glbObsFit, mdl_id="", label="fit"))[, -grep(glbFeatsCategory, names(ctgry_df))]))
if (!is.null(glb_mdl_ensemble)) {
fit.models_2_chunk_df <- myadd_chunk(fit.models_2_chunk_df,
paste0("fit.models_2_", mdl_id_pfx), major.inc = TRUE,
label.minor = "ensemble")
mdl_id_pfx <- "Ensemble"
if (#(glb_is_regression) |
((glb_is_classification) & (!glb_is_binomial)))
stop("Ensemble models not implemented yet for multinomial classification")
mygetEnsembleAutoMdlIds <- function() {
tmp_models_df <- orderBy(get_model_sel_frmla(), glb_models_df)
row.names(tmp_models_df) <- tmp_models_df$id
mdl_threshold_pos <-
min(which(grepl("MFO|Random|Baseline", tmp_models_df$id))) - 1
mdlIds <- tmp_models_df$id[1:mdl_threshold_pos]
return(mdlIds[!grepl("Ensemble", mdlIds)])
}
if (glb_mdl_ensemble == "auto") {
glb_mdl_ensemble <- mygetEnsembleAutoMdlIds()
mdl_id_pfx <- paste0(mdl_id_pfx, ".auto")
} else if (grepl("^%<d-%", glb_mdl_ensemble)) {
glb_mdl_ensemble <- eval(parse(text =
str_trim(unlist(strsplit(glb_mdl_ensemble, "%<d-%"))[2])))
}
for (mdl_id in glb_mdl_ensemble) {
if (!(mdl_id %in% names(glb_models_lst))) {
warning("Model ", mdl_id, " in glb_model_ensemble not found !")
next
}
glbObsFit <- glb_get_predictions(df = glbObsFit, mdl_id, glb_rsp_var)
glbObsOOB <- glb_get_predictions(df = glbObsOOB, mdl_id, glb_rsp_var)
}
#mdl_id_pfx <- "Ensemble.RFE"; mdlId <- paste0(mdl_id_pfx, ".glmnet")
#glb_mdl_ensemble <- gsub(mygetPredictIds$value, "", grep("RFE\\.X\\.(?!Interact)", row.names(glb_featsimp_df), perl = TRUE, value = TRUE), fixed = TRUE)
#varImp(glb_models_lst[[mdlId]])
#cor_df <- data.frame(cor=cor(glbObsFit[, glb_rsp_var], glbObsFit[, paste(mygetPredictIds$value, glb_mdl_ensemble)], use="pairwise.complete.obs"))
#glbObsFit <- glb_get_predictions(df=glbObsFit, "Ensemble.glmnet", glb_rsp_var);print(colSums((ctgry_df <- myget_category_stats(obs_df=glbObsFit, mdl_id="Ensemble.glmnet", label="fit"))[, -grep(glbFeatsCategory, names(ctgry_df))]))
### bid0_sp
# Better than MFO; models.n=28; min.RMSE.fit=0.0521233; err.abs.fit.sum=7.3631895
# old: Top x from auto; models.n= 5; min.RMSE.fit=0.06311047; err.abs.fit.sum=9.5937080
# RFE only ; models.n=16; min.RMSE.fit=0.05148588; err.abs.fit.sum=7.2875091
# RFE subset only ;models.n= 5; min.RMSE.fit=0.06040702; err.abs.fit.sum=9.059088
# RFE subset only ;models.n= 9; min.RMSE.fit=0.05933167; err.abs.fit.sum=8.7421288
# RFE subset only ;models.n=15; min.RMSE.fit=0.0584607; err.abs.fit.sum=8.5902066
# RFE subset only ;models.n=17; min.RMSE.fit=0.05496899; err.abs.fit.sum=8.0170431
# RFE subset only ;models.n=18; min.RMSE.fit=0.05441577; err.abs.fit.sum=7.837223
# RFE subset only ;models.n=16; min.RMSE.fit=0.05441577; err.abs.fit.sum=7.837223
### bid0_sp
### bid1_sp
# "auto"; err.abs.fit.sum=76.699774; min.RMSE.fit=0.2186429
# "RFE.X.*"; err.abs.fit.sum=; min.RMSE.fit=0.221114
### bid1_sp
indep_vars <- paste(mygetPredictIds(glb_rsp_var)$value, glb_mdl_ensemble, sep = "")
if (glb_is_classification)
indep_vars <- paste(indep_vars, ".prob", sep = "")
# Some models in glb_mdl_ensemble might not be fitted e.g. RFE.X.Interact
indep_vars <- intersect(indep_vars, names(glbObsFit))
# indep_vars <- grep(mygetPredictIds(glb_rsp_var)$value, names(glbObsFit), fixed=TRUE, value=TRUE)
# if (glb_is_regression)
# indep_vars <- indep_vars[!grepl("(err\\.abs|accurate)$", indep_vars)]
# if (glb_is_classification && glb_is_binomial)
# indep_vars <- grep("prob$", indep_vars, value=TRUE) else
# indep_vars <- indep_vars[!grepl("err$", indep_vars)]
#rfe_fit_ens_results <- myrun_rfe(glbObsFit, indep_vars)
for (method in c("glm", "glmnet")) {
for (trainControlMethod in
c("boot", "boot632", "cv", "repeatedcv"
#, "LOOCV" # tuneLength * nrow(fitDF)
, "LGOCV", "adaptive_cv"
#, "adaptive_boot" #error: adaptive$min should be less than 3
#, "adaptive_LGOCV" #error: adaptive$min should be less than 3
)) {
#sav_models_df <- glb_models_df; all.equal(sav_models_df, glb_models_df)
#glb_models_df <- sav_models_df; print(glb_models_df$id)
if ((method == "glm") && (trainControlMethod != "repeatedcv"))
# glm used only to identify outliers
next
ret_lst <- myfit_mdl(
mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst = list(
id.prefix = paste0(mdl_id_pfx, ".", trainControlMethod),
type = glb_model_type, tune.df = NULL,
trainControl.method = trainControlMethod,
trainControl.number = glb_rcv_n_folds,
trainControl.repeats = glb_rcv_n_repeats,
trainControl.classProbs = glb_is_classification,
trainControl.summaryFunction = glbMdlMetricSummaryFn,
train.metric = glbMdlMetricSummary,
train.maximize = glbMdlMetricMaximize,
train.method = method)),
indep_vars = indep_vars, rsp_var = glb_rsp_var,
fit_df = glbObsFit, OOB_df = glbObsOOB)
}
}
dsp_models_df <- get_dsp_models_df()
}
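# For orientation, the ensemble step above amounts to (1) keeping every model
# ranked above the first MFO/Random/Baseline entry and (2) training a
# meta-model on the component models' class probabilities. A minimal stacking
# sketch on simulated data (toy ids and toy probabilities, assuming caret and
# glmnet are installed; not the pipeline's actual frames):
# (1) candidate selection: ids ranked above the first baseline are kept
toy_ids <- c("All.X##rcv#glmnet", "Low.cor.X##rcv#glmnet",
             "Random###myrandom_classfr", "MFO###myMFO_classfr")
mdl_threshold_pos <- min(which(grepl("MFO|Random|Baseline", toy_ids))) - 1
print(toy_ids[1:mdl_threshold_pos])
# (2) stacking: fit a glmnet meta-model on component probabilities
suppressPackageStartupMessages(require(caret))
set.seed(123)
toy_fit <- data.frame(mdlA.prob = runif(100), mdlB.prob = runif(100))
toy_fit$label.fctr <- factor(
    ifelse(toy_fit$mdlA.prob + rnorm(100, sd = 0.3) > 0.5,
           "left_eye_center", ".none"),
    levels = c(".none", "left_eye_center"))
ens_mdl <- train(label.fctr ~ mdlA.prob + mdlB.prob, data = toy_fit,
                 method = "glmnet",
                 trControl = trainControl(method = "repeatedcv",
                                          number = 3, repeats = 2))
print(ens_mdl$bestTune)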
if (is.null(glb_sel_mdl_id))
glb_sel_mdl_id <- dsp_models_df[1, "id"] else
print(sprintf("User specified selection: %s", glb_sel_mdl_id))
## [1] "User specified selection: All.X##rcv#glmnet"
myprint_mdl(glb_sel_mdl <- glb_models_lst[[glb_sel_mdl_id]])
## Length Class Mode
## a0 81 -none- numeric
## beta 1701 dgCMatrix S4
## df 81 -none- numeric
## dim 2 -none- numeric
## lambda 81 -none- numeric
## dev.ratio 81 -none- numeric
## nulldev 1 -none- numeric
## npasses 1 -none- numeric
## jerr 1 -none- numeric
## offset 1 -none- logical
## classnames 2 -none- character
## call 5 -none- call
## nobs 1 -none- numeric
## lambdaOpt 1 -none- numeric
## xNames 21 -none- character
## problemType 1 -none- character
## tuneValue 2 data.frame list
## obsLevels 2 -none- character
## [1] "min lambda > lambdaOpt:"
## (Intercept) .rnorm
## 0.99526771 -0.02090746
## P.cor P.cor.cut.fctr(0,0.5]
## 2.64474015 -0.84028965
## P.cor.cut.fctr(0.5,0.7] P.cor.cut.fctr(0.7,1]
## -0.54462113 -0.12752847
## P.cosSml P.cosSml.cut.fctr(0.95,0.97]
## -3.63676497 0.04849048
## P.cosSml.cut.fctr(0.97,0.98] P.cosSml.cut.fctr(0.98,1]
## 0.16031813 0.37667232
## P.mnkSml.1.scld P.mnkSml.1.scld.cut.fctr(0.47,0.64]
## -0.26783206 0.10887413
## P.mnkSml.1.scld.cut.fctr(0.64,0.87] P.mnkSml.1.scld.cut.fctr(0.87,2.9]
## 0.19672374 0.41167020
## P.mnkSml.2.scld P.mnkSml.2.scld.cut.fctr(0.86,1.14]
## 0.11127701 -0.09447011
## P.mnkSml.2.scld.cut.fctr(1.14,1.48] P.mnkSml.2.scld.cut.fctr(1.48,4.6]
## -0.18315876 -0.37428129
## P.mnkSml.3.scld P.mnkSml.3.scld.cut.fctr(0.21,0.27]
## 0.55362273 0.03032794
## P.mnkSml.3.scld.cut.fctr(0.27,0.35] P.mnkSml.3.scld.cut.fctr(0.35,1.1]
## 0.04257503 -0.01830337
## [1] "max lambda < lambdaOpt:"
## [1] "Feats mismatch between coefs_left & rght:"
## [1] "(Intercept)"
## [2] ".rnorm"
## [3] "P.cor"
## [4] "P.cor.cut.fctr(0,0.5]"
## [5] "P.cor.cut.fctr(0.5,0.7]"
## [6] "P.cor.cut.fctr(0.7,1]"
## [7] "P.cosSml"
## [8] "P.cosSml.cut.fctr(0.95,0.97]"
## [9] "P.cosSml.cut.fctr(0.97,0.98]"
## [10] "P.cosSml.cut.fctr(0.98,1]"
## [11] "P.mnkSml.1.scld"
## [12] "P.mnkSml.1.scld.cut.fctr(0.47,0.64]"
## [13] "P.mnkSml.1.scld.cut.fctr(0.64,0.87]"
## [14] "P.mnkSml.1.scld.cut.fctr(0.87,2.9]"
## [15] "P.mnkSml.2.scld"
## [16] "P.mnkSml.2.scld.cut.fctr(0.86,1.14]"
## [17] "P.mnkSml.2.scld.cut.fctr(1.14,1.48]"
## [18] "P.mnkSml.2.scld.cut.fctr(1.48,4.6]"
## [19] "P.mnkSml.3.scld"
## [20] "P.mnkSml.3.scld.cut.fctr(0.21,0.27]"
## [21] "P.mnkSml.3.scld.cut.fctr(0.27,0.35]"
## [22] "P.mnkSml.3.scld.cut.fctr(0.35,1.1]"
## [1] TRUE
# From here to save(), this should all be in one function
# these steps are executed in the same sequence twice more:
# fit.data.training & predict.data.new chunks
print(sprintf("%s fit prediction diagnostics:", glb_sel_mdl_id))
## [1] "All.X##rcv#glmnet fit prediction diagnostics:"
glbObsFit <- glb_get_predictions(df = glbObsFit, mdl_id = glb_sel_mdl_id,
rsp_var = glb_rsp_var)
print(sprintf("%s OOB prediction diagnostics:", glb_sel_mdl_id))
## [1] "All.X##rcv#glmnet OOB prediction diagnostics:"
glbObsOOB <- glb_get_predictions(df = glbObsOOB, mdl_id = glb_sel_mdl_id,
rsp_var = glb_rsp_var)
print(glb_featsimp_df <- myget_feats_importance(mdl = glb_sel_mdl, featsimp_df = NULL))
## All.X..rcv.glmnet.imp imp
## P.cosSml 100.00000000 100.00000000
## P.cor 72.58434846 72.58434846
## P.cor.cut.fctr(0,0.5] 22.71645732 22.71645732
## P.mnkSml.3.scld 14.79411470 14.79411470
## P.cor.cut.fctr(0.5,0.7] 14.54534598 14.54534598
## P.mnkSml.1.scld.cut.fctr(0.87,2.9] 10.87110697 10.87110697
## P.cosSml.cut.fctr(0.98,1] 9.90390371 9.90390371
## P.mnkSml.2.scld.cut.fctr(1.48,4.6] 9.83782520 9.83782520
## P.mnkSml.1.scld 6.89598837 6.89598837
## P.mnkSml.1.scld.cut.fctr(0.64,0.87] 4.93083499 4.93083499
## P.mnkSml.2.scld.cut.fctr(1.14,1.48] 4.55595258 4.55595258
## P.cosSml.cut.fctr(0.97,0.98] 3.92472751 3.92472751
## P.cor.cut.fctr(0.7,1] 3.01855082 3.01855082
## P.mnkSml.2.scld 2.56942459 2.56942459
## P.mnkSml.1.scld.cut.fctr(0.47,0.64] 2.50301837 2.50301837
## P.mnkSml.2.scld.cut.fctr(0.86,1.14] 2.10494834 2.10494834
## P.cosSml.cut.fctr(0.95,0.97] 0.83425263 0.83425263
## P.mnkSml.3.scld.cut.fctr(0.27,0.35] 0.67077294 0.67077294
## P.mnkSml.3.scld.cut.fctr(0.21,0.27] 0.33231166 0.33231166
## .rnorm 0.07196682 0.07196682
## P.mnkSml.3.scld.cut.fctr(0.35,1.1] 0.00000000 0.00000000
#mdl_id <-"RFE.X.glmnet"; glb_featsimp_df <- myget_feats_importance(glb_models_lst[[mdl_id]], glb_featsimp_df); glb_featsimp_df[, paste0(mdl_id, ".imp")] <- glb_featsimp_df$imp; print(glb_featsimp_df)
#print(head(sbst_featsimp_df <- subset(glb_featsimp_df, is.na(RFE.X.glmnet.imp) | (abs(RFE.X.YeoJohnson.glmnet.imp - RFE.X.glmnet.imp) > 0.0001), select=-imp)))
#print(orderBy(~ -cor.y.abs, subset(glb_feats_df, id %in% c(row.names(sbst_featsimp_df), "startprice.dcm1.is9", "D.weight.post.stop.sum"))))
# Used again in fit.data.training & predict.data.new chunks
glb_analytics_diag_plots <- function(obs_df, mdl_id, prob_threshold=NULL) {
if (!is.null(featsimp_df <- glb_featsimp_df)) {
featsimp_df$feat <- gsub("`(.*?)`", "\\1", row.names(featsimp_df))
featsimp_df$feat.interact <- gsub("(.*?):(.*)", "\\2", featsimp_df$feat)
featsimp_df$feat <- gsub("(.*?):(.*)", "\\1", featsimp_df$feat)
featsimp_df$feat.interact <-
ifelse(featsimp_df$feat.interact == featsimp_df$feat,
NA, featsimp_df$feat.interact)
featsimp_df$feat <-
gsub("(.*?)\\.fctr(.*)", "\\1\\.fctr", featsimp_df$feat)
featsimp_df$feat.interact <-
gsub("(.*?)\\.fctr(.*)", "\\1\\.fctr", featsimp_df$feat.interact)
featsimp_df <- orderBy(~ -imp.max,
summaryBy(imp ~ feat + feat.interact, data=featsimp_df,
FUN=max))
#rex_str=":(.*)"; txt_vctr=tail(featsimp_df$feat); ret_lst <- regexec(rex_str, txt_vctr); ret_lst <- regmatches(txt_vctr, ret_lst); ret_vctr <- sapply(1:length(ret_lst), function(pos_ix) ifelse(length(ret_lst[[pos_ix]]) > 0, ret_lst[[pos_ix]], "")); print(ret_vctr <- ret_vctr[ret_vctr != ""])
featsimp_df <- subset(featsimp_df, !is.na(imp.max))
if (nrow(featsimp_df) > 5) {
warning("Limiting important feature scatter plots to 5 out of ",
nrow(featsimp_df))
featsimp_df <- head(featsimp_df, 5)
}
# if (!all(is.na(featsimp_df$feat.interact)))
# stop("not implemented yet")
rsp_var_out <- mygetPredictIds(glb_rsp_var, mdl_id)$value
for (var in featsimp_df$feat) {
plot_df <- melt(obs_df, id.vars = var,
measure.vars = c(glb_rsp_var, rsp_var_out))
print(myplot_scatter(plot_df, var, "value", colorcol_name = "variable",
facet_colcol_name = "variable", jitter = TRUE) +
guides(color = FALSE))
}
}
if (glb_is_regression) {
if (is.null(featsimp_df) || (nrow(featsimp_df) == 0))
warning("No important features in glb_fin_mdl") else
print(myplot_prediction_regression(df=obs_df,
feat_x=ifelse(nrow(featsimp_df) > 1, featsimp_df$feat[2],
".rownames"),
feat_y=featsimp_df$feat[1],
rsp_var=glb_rsp_var, rsp_var_out=rsp_var_out,
id_vars=glbFeatsId)
# + facet_wrap(reformulate(featsimp_df$feat[2])) # if [1 or 2] is a factor
# + geom_point(aes_string(color="<col_name>.fctr")) # to color the plot
)
}
if (glb_is_classification) {
if (is.null(featsimp_df) || (nrow(featsimp_df) == 0))
warning("No features in selected model are statistically important")
else print(myplot_prediction_classification(df = obs_df,
feat_x = ifelse(nrow(featsimp_df) > 1,
featsimp_df$feat[2], ".rownames"),
feat_y = featsimp_df$feat[1],
rsp_var = glb_rsp_var,
rsp_var_out = rsp_var_out,
id_vars = glbFeatsId,
prob_threshold = prob_threshold))
}
}
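# The gsub() chain at the top of glb_analytics_diag_plots() normalizes
# importance row names before plotting. A minimal sketch with hypothetical
# names showing each step (backtick stripping, interaction splitting,
# collapsing factor dummies to their base .fctr feature):
toy_names <- c("`P.cor.cut.fctr(0,0.5]`", "P.cosSml", "`featA:featB`")
feat <- gsub("`(.*?)`", "\\1", toy_names)          # drop backticks
feat.interact <- gsub("(.*?):(.*)", "\\2", feat)   # right side of a:b
feat <- gsub("(.*?):(.*)", "\\1", feat)            # left side of a:b
feat.interact <- ifelse(feat.interact == feat, NA, feat.interact)
feat <- gsub("(.*?)\\.fctr(.*)", "\\1.fctr", feat) # collapse dummy levels
print(data.frame(feat, feat.interact))
# feat: P.cor.cut.fctr, P.cosSml, featA; feat.interact: NA, NA, featB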
if (glb_is_classification && glb_is_binomial)
glb_analytics_diag_plots(obs_df = glbObsOOB, mdl_id = glb_sel_mdl_id,
prob_threshold = glb_models_df[glb_models_df$id == glb_sel_mdl_id,
"opt.prob.threshold.OOB"]) else
glb_analytics_diag_plots(obs_df = glbObsOOB, mdl_id = glb_sel_mdl_id)
## Warning in glb_analytics_diag_plots(obs_df = glbObsOOB, mdl_id =
## glb_sel_mdl_id, : Limiting important feature scatter plots to 5 out of 11
## [1] "Min/Max Boundaries: "
## ImageId.x.y label.fctr label.fctr.All.X..rcv.glmnet.prob
## 1 Train#1456#72#28 left_eye_center 0.02169475
## 2 Train#3231#67#38 left_eye_center 0.52736746
## 3 Train#5882#66#38 left_eye_center 0.58906692
## 4 Train#4303#67#34 .none 0.38760211
## label.fctr.All.X..rcv.glmnet label.fctr.All.X..rcv.glmnet.err
## 1 .none TRUE
## 2 left_eye_center FALSE
## 3 left_eye_center FALSE
## 4 left_eye_center TRUE
## label.fctr.All.X..rcv.glmnet.err.abs label.fctr.All.X..rcv.glmnet.is.acc
## 1 0.9783052 FALSE
## 2 0.4726325 TRUE
## 3 0.4109331 TRUE
## 4 0.3876021 FALSE
## label.fctr.All.X..rcv.glmnet.accurate label.fctr.All.X..rcv.glmnet.error
## 1 FALSE -0.1783052
## 2 TRUE 0.0000000
## 3 TRUE 0.0000000
## 4 FALSE 0.1876021
## .label
## 1 Train#1456#72#28
## 2 Train#3231#67#38
## 3 Train#5882#66#38
## 4 Train#4303#67#34
## [1] "Inaccurate: "
## ImageId.x.y label.fctr label.fctr.All.X..rcv.glmnet.prob
## 1 Train#1456#72#28 left_eye_center 0.02169475
## 2 Train#4049#68#37 left_eye_center 0.04156099
## 3 Train#2651#71#35 left_eye_center 0.04347353
## 4 Train#0074#70#31 left_eye_center 0.04391682
## 5 Train#1153#67#34 left_eye_center 0.04428690
## 6 Train#5240#69#42 left_eye_center 0.04448363
## label.fctr.All.X..rcv.glmnet label.fctr.All.X..rcv.glmnet.err
## 1 .none TRUE
## 2 .none TRUE
## 3 .none TRUE
## 4 .none TRUE
## 5 .none TRUE
## 6 .none TRUE
## label.fctr.All.X..rcv.glmnet.err.abs label.fctr.All.X..rcv.glmnet.is.acc
## 1 0.9783052 FALSE
## 2 0.9584390 FALSE
## 3 0.9565265 FALSE
## 4 0.9560832 FALSE
## 5 0.9557131 FALSE
## 6 0.9555164 FALSE
## label.fctr.All.X..rcv.glmnet.accurate label.fctr.All.X..rcv.glmnet.error
## 1 FALSE -0.1783052
## 2 FALSE -0.1584390
## 3 FALSE -0.1565265
## 4 FALSE -0.1560832
## 5 FALSE -0.1557131
## 6 FALSE -0.1555164
## ImageId.x.y label.fctr label.fctr.All.X..rcv.glmnet.prob
## 620 Train#4113#66#27 left_eye_center 0.1297886
## 2255 Train#4837#68#34 .none 0.2240955
## 3752 Train#6652#64#37 .none 0.2559267
## 4036 Train#4379#63#44 .none 0.2625402
## 5323 Train#1084#68#38 .none 0.3268708
## 6317 Train#4357#72#42 .none 0.4551435
## label.fctr.All.X..rcv.glmnet label.fctr.All.X..rcv.glmnet.err
## 620 .none TRUE
## 2255 left_eye_center TRUE
## 3752 left_eye_center TRUE
## 4036 left_eye_center TRUE
## 5323 left_eye_center TRUE
## 6317 left_eye_center TRUE
## label.fctr.All.X..rcv.glmnet.err.abs
## 620 0.8702114
## 2255 0.2240955
## 3752 0.2559267
## 4036 0.2625402
## 5323 0.3268708
## 6317 0.4551435
## label.fctr.All.X..rcv.glmnet.is.acc
## 620 FALSE
## 2255 FALSE
## 3752 FALSE
## 4036 FALSE
## 5323 FALSE
## 6317 FALSE
## label.fctr.All.X..rcv.glmnet.accurate
## 620 FALSE
## 2255 FALSE
## 3752 FALSE
## 4036 FALSE
## 5323 FALSE
## 6317 FALSE
## label.fctr.All.X..rcv.glmnet.error
## 620 -0.07021143
## 2255 0.02409554
## 3752 0.05592675
## 4036 0.06254015
## 5323 0.12687082
## 6317 0.25514349
## ImageId.x.y label.fctr label.fctr.All.X..rcv.glmnet.prob
## 6641 Train#2286#64#36 .none 0.5409636
## 6642 Train#4778#65#42 .none 0.5410543
## 6643 Train#2833#64#40 .none 0.5463673
## 6644 Train#4556#70#38 .none 0.5483082
## 6645 Train#5757#65#39 .none 0.5605377
## 6646 Train#2926#67#43 .none 0.5717721
## label.fctr.All.X..rcv.glmnet label.fctr.All.X..rcv.glmnet.err
## 6641 left_eye_center TRUE
## 6642 left_eye_center TRUE
## 6643 left_eye_center TRUE
## 6644 left_eye_center TRUE
## 6645 left_eye_center TRUE
## 6646 left_eye_center TRUE
## label.fctr.All.X..rcv.glmnet.err.abs
## 6641 0.5409636
## 6642 0.5410543
## 6643 0.5463673
## 6644 0.5483082
## 6645 0.5605377
## 6646 0.5717721
## label.fctr.All.X..rcv.glmnet.is.acc
## 6641 FALSE
## 6642 FALSE
## 6643 FALSE
## 6644 FALSE
## 6645 FALSE
## 6646 FALSE
## label.fctr.All.X..rcv.glmnet.accurate
## 6641 FALSE
## 6642 FALSE
## 6643 FALSE
## 6644 FALSE
## 6645 FALSE
## 6646 FALSE
## label.fctr.All.X..rcv.glmnet.error
## 6641 0.3409636
## 6642 0.3410543
## 6643 0.3463673
## 6644 0.3483082
## 6645 0.3605377
## 6646 0.3717721
if (!is.null(glbFeatsCategory)) {
glbLvlCategory <- merge(glbLvlCategory,
myget_category_stats(obs_df = glbObsFit, mdl_id = glb_sel_mdl_id,
label = "fit"),
by = glbFeatsCategory, all = TRUE)
row.names(glbLvlCategory) <- glbLvlCategory[, glbFeatsCategory]
glbLvlCategory <- merge(glbLvlCategory,
myget_category_stats(obs_df = glbObsOOB, mdl_id = glb_sel_mdl_id,
label="OOB"),
#by=glbFeatsCategory, all=TRUE) glb_ctgry_df already contains .n.OOB ?
all = TRUE)
row.names(glbLvlCategory) <- glbLvlCategory[, glbFeatsCategory]
if (any(grepl("OOB", glbMdlMetricsEval)))
print(orderBy(~-err.abs.OOB.mean, glbLvlCategory)) else
print(orderBy(~-err.abs.fit.mean, glbLvlCategory))
print(colSums(glbLvlCategory[, -grep(glbFeatsCategory, names(glbLvlCategory))]))
}
## P.cor.cut.fctr .n.OOB .n.Fit .n.Tst .freqRatio.Fit
## (0.7,1] (0.7,1] 2254 2254 4689 0.12711482
## (0.5,0.7] (0.5,0.7] 6686 6687 14117 0.37711482
## (0,0.5] (0,0.5] 8409 8411 23379 0.47434018
## (-1,0] (-1,0] 379 380 2390 0.02143018
## .freqRatio.OOB .freqRatio.Tst err.abs.fit.sum err.abs.fit.mean
## (0.7,1] 0.12714350 0.1051935 1095.14943 0.4858693
## (0.5,0.7] 0.37714350 0.3167022 2429.25533 0.3632803
## (0,0.5] 0.47433439 0.5244868 1578.85672 0.1877133
## (-1,0] 0.02137861 0.0536175 56.55873 0.1488388
## .n.fit err.abs.OOB.sum err.abs.OOB.mean
## (0.7,1] 2254 1089.01902 0.4831495
## (0.5,0.7] 6687 2428.80508 0.3632673
## (0,0.5] 8411 1573.25059 0.1870913
## (-1,0] 380 57.34759 0.1513129
## .n.OOB .n.Fit .n.Tst .freqRatio.Fit
## 17728.000000 17732.000000 44575.000000 1.000000
## .freqRatio.OOB .freqRatio.Tst err.abs.fit.sum err.abs.fit.mean
## 1.000000 1.000000 5159.820204 1.185702
## .n.fit err.abs.OOB.sum err.abs.OOB.mean
## 17732.000000 5148.422279 1.184821
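# The column totals printed above come from the negative-grep pattern in the
# chunk, which drops the (non-numeric) category column before summing. A
# minimal sketch on a toy frame (hypothetical values):
toy_lvl <- data.frame(P.cor.cut.fctr  = c("(0,0.5]", "(0.5,0.7]"),
                      .n.OOB          = c(8409, 6686),
                      err.abs.OOB.sum = c(1573.25, 2428.81))
# drop the category column by negative grep(), then sum the numeric stats
print(colSums(toy_lvl[, -grep("P.cor.cut.fctr", names(toy_lvl))]))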
write.csv(glbObsOOB[, c(glbFeatsId,
grep(glb_rsp_var, names(glbObsOOB), fixed=TRUE, value=TRUE))],
paste0(gsub(".", "_", paste0(glbOut$pfx, glb_sel_mdl_id), fixed=TRUE),
"_OOBobs.csv"), row.names=FALSE)
fit.models_2_chunk_df <-
myadd_chunk(NULL, "fit.models_2_bgn", label.minor = "teardown")
## label step_major step_minor label_minor bgn end elapsed
## 1 fit.models_2_bgn 1 0 teardown 428.15 NA NA
glb_chunks_df <- myadd_chunk(glb_chunks_df, "fit.models", major.inc=FALSE)
## label step_major step_minor label_minor bgn end elapsed
## 18 fit.models 8 2 2 397.878 428.171 30.293
## 19 fit.models 8 3 3 428.172 NA NA
# if (sum(is.na(glbObsAll$D.P.http)) > 0)
# stop("fit.models_3: Why is this happening ?")
#stop(here"); glb2Sav()
sync_glb_obs_df <- function() {
# Merge or cbind ?
for (col in setdiff(names(glbObsFit), names(glbObsTrn)))
glbObsTrn[glbObsTrn$.lcn == "Fit", col] <<- glbObsFit[, col]
for (col in setdiff(names(glbObsFit), names(glbObsAll)))
glbObsAll[glbObsAll$.lcn == "Fit", col] <<- glbObsFit[, col]
if (all(is.na(glbObsNew[, glb_rsp_var])))
for (col in setdiff(names(glbObsOOB), names(glbObsTrn)))
glbObsTrn[glbObsTrn$.lcn == "OOB", col] <<- glbObsOOB[, col]
for (col in setdiff(names(glbObsOOB), names(glbObsAll)))
glbObsAll[glbObsAll$.lcn == "OOB", col] <<- glbObsOOB[, col]
}
sync_glb_obs_df()
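# sync_glb_obs_df() copies columns created on the Fit/OOB partitions back into
# the parent frames via <<-, relying on the .lcn marker and on stable row
# order. A minimal sketch of the copy-back (toy frames, hypothetical .lcn
# values):
toy_all <- data.frame(id = 1:4, .lcn = c("Fit", "Fit", "OOB", "OOB"))
toy_fit <- subset(toy_all, .lcn == "Fit")
toy_fit$pred <- c(0.1, 0.9)  # column created only on the Fit partition
for (col in setdiff(names(toy_fit), names(toy_all)))
    toy_all[toy_all$.lcn == "Fit", col] <- toy_fit[, col]
print(toy_all)  # pred filled for Fit rows, NA for OOB rows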
print(setdiff(names(glbObsNew), names(glbObsAll)))
## character(0)
replay.petrisim(pn=glb_analytics_pn,
replay.trans=(glb_analytics_avl_objs <- c(glb_analytics_avl_objs,
"model.selected")), flip_coord=TRUE)
## time trans "bgn " "fit.data.training.all " "predict.data.new " "end "
## 0.0000 multiple enabled transitions: data.training.all data.new model.selected firing: data.training.all
## 1.0000 1 2 1 0 0
## 1.0000 multiple enabled transitions: data.training.all data.new model.selected model.final data.training.all.prediction firing: data.new
## 2.0000 2 1 1 1 0
## 2.0000 multiple enabled transitions: data.training.all data.new model.selected model.final data.training.all.prediction data.new.prediction firing: model.selected
## 3.0000 3 0 2 1 0
glb_chunks_df <- myadd_chunk(glb_chunks_df, "fit.data.training", major.inc = TRUE)
## label step_major step_minor label_minor bgn end
## 19 fit.models 8 3 3 428.172 442.049
## 20 fit.data.training 9 0 0 442.049 NA
## elapsed
## 19 13.877
## 20 NA
9.0: fit data training
#load(paste0(glb_inp_pfx, "dsk.RData"))
if (!is.null(glb_fin_mdl_id) && (glb_fin_mdl_id %in% names(glb_models_lst))) {
warning("Final model same as user selected model")
glb_fin_mdl <- glb_models_lst[[glb_fin_mdl_id]]
} else
# if (nrow(glbObsFit) + length(glbObsFitOutliers) == nrow(glbObsTrn))
if (!all(is.na(glbObsNew[, glb_rsp_var])))
{
warning("Final model same as glb_sel_mdl_id")
glb_fin_mdl_id <- paste0("Final.", glb_sel_mdl_id)
glb_fin_mdl <- glb_sel_mdl
glb_models_lst[[glb_fin_mdl_id]] <- glb_fin_mdl
} else {
if (grepl("RFE\\.X", names(glbMdlFamilies))) {
indep_vars <- myadjust_interaction_feats(subset(glb_feats_df,
!nzv & (exclude.as.feat != 1))[, "id"])
rfe_trn_results <-
myrun_rfe(glbObsTrn, indep_vars, glbRFESizes[["Final"]])
if (!isTRUE(all.equal(sort(predictors(rfe_trn_results)),
sort(predictors(rfe_fit_results))))) {
print("Diffs predictors(rfe_trn_results) vs. predictors(rfe_fit_results):")
print(setdiff(predictors(rfe_trn_results), predictors(rfe_fit_results)))
print("Diffs predictors(rfe_fit_results) vs. predictors(rfe_trn_results):")
print(setdiff(predictors(rfe_fit_results), predictors(rfe_trn_results)))
}
}
# }
if (grepl("Ensemble", glb_sel_mdl_id)) {
# Find which models are relevant
mdlimp_df <- subset(myget_feats_importance(glb_sel_mdl), imp > 5)
# Fit selected models on glbObsTrn
for (mdl_id in gsub(".prob", "",
gsub(mygetPredictIds(glb_rsp_var)$value, "", row.names(mdlimp_df), fixed = TRUE),
fixed = TRUE)) {
mdl_id_components <- unlist(strsplit(mdl_id, "[.]"))
mdlIdPfx <- paste0(c(head(mdl_id_components, -1), "Train"),
collapse = ".")
if (grepl("RFE\\.X\\.", mdlIdPfx))
mdlIndepVars <- myadjust_interaction_feats(myextract_actual_feats(
predictors(rfe_trn_results))) else
mdlIndepVars <- trim(unlist(
strsplit(glb_models_df[glb_models_df$id == mdl_id, "feats"], "[,]")))
ret_lst <-
myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst = list(
id.prefix = mdlIdPfx,
type = glb_model_type, tune.df = glbMdlTuneParams,
trainControl.method = "repeatedcv",
trainControl.number = glb_rcv_n_folds,
trainControl.repeats = glb_rcv_n_repeats,
trainControl.classProbs = glb_is_classification,
trainControl.summaryFunction = glbMdlMetricSummaryFn,
train.metric = glbMdlMetricSummary,
train.maximize = glbMdlMetricMaximize,
train.method = tail(mdl_id_components, 1))),
indep_vars = mdlIndepVars,
rsp_var = glb_rsp_var,
fit_df = glbObsTrn, OOB_df = NULL)
glbObsTrn <- glb_get_predictions(df = glbObsTrn,
mdl_id = tail(glb_models_df$id, 1),
rsp_var = glb_rsp_var,
prob_threshold_def =
subset(glb_models_df, id == mdl_id)$opt.prob.threshold.OOB)
glbObsNew <- glb_get_predictions(df = glbObsNew,
mdl_id = tail(glb_models_df$id, 1),
rsp_var = glb_rsp_var,
prob_threshold_def =
subset(glb_models_df, id == mdl_id)$opt.prob.threshold.OOB)
}
}
# "Final" model
if ((model_method <- glb_sel_mdl$method) == "custom")
# get actual method from the mdl_id
model_method <- tail(unlist(strsplit(glb_sel_mdl_id, "[.]")), 1)
if (grepl("Ensemble", glb_sel_mdl_id)) {
# Find which models are relevant
mdlimp_df <- subset(myget_feats_importance(glb_sel_mdl), imp > 5)
if (glb_is_classification && glb_is_binomial)
indep_vars_vctr <- gsub("(.*)\\.(.*)\\.prob", "\\1\\.Train\\.\\2\\.prob",
row.names(mdlimp_df)) else
indep_vars_vctr <- gsub("(.*)\\.(.*)", "\\1\\.Train\\.\\2",
row.names(mdlimp_df))
} else
if (grepl("RFE.X", glb_sel_mdl_id, fixed = TRUE)) {
indep_vars_vctr <- myextract_actual_feats(predictors(rfe_trn_results))
} else indep_vars_vctr <-
trim(unlist(strsplit(glb_models_df[glb_models_df$id ==
glb_sel_mdl_id
, "feats"], "[,]")))
if (!is.null(glb_preproc_methods) &&
((match_pos <- regexpr(gsub(".", "\\.",
paste(glb_preproc_methods, collapse = "|"),
fixed = TRUE), glb_sel_mdl_id)) != -1))
ths_preProcess <- str_sub(glb_sel_mdl_id, match_pos,
match_pos + attr(match_pos, "match.length") - 1) else
ths_preProcess <- NULL
mdl_id_pfx <- ifelse(grepl("Ensemble", glb_sel_mdl_id),
"Final.Ensemble", "Final")
trnobs_df <- glbObsTrn
if (!is.null(glbObsTrnOutliers[[mdl_id_pfx]])) {
trnobs_df <- glbObsTrn[!(glbObsTrn[, glbFeatsId] %in% glbObsTrnOutliers[[mdl_id_pfx]]), ]
print(sprintf("Outliers removed: %d", nrow(glbObsTrn) - nrow(trnobs_df)))
print(setdiff(glbObsTrn[, glbFeatsId], trnobs_df[, glbFeatsId]))
}
# Force fitting of Final.glm to identify outliers
method_vctr <- unique(c(myparseMdlId(glb_sel_mdl_id)$alg, glbMdlFamilies[["Final"]]))
for (method in method_vctr) {
#source("caret_nominalTrainWorkflow.R")
# glmnet requires at least 2 indep vars
if ((length(indep_vars_vctr) == 1) && (method %in% "glmnet"))
next
ret_lst <-
myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst = list(
id.prefix = mdl_id_pfx,
type = glb_model_type, trainControl.method = "repeatedcv",
trainControl.number = glb_rcv_n_folds,
trainControl.repeats = glb_rcv_n_repeats,
trainControl.classProbs = glb_is_classification,
trainControl.summaryFunction = glbMdlMetricSummaryFn,
trainControl.allowParallel = glbMdlAllowParallel,
train.metric = glbMdlMetricSummary,
train.maximize = glbMdlMetricMaximize,
train.method = method,
train.preProcess = ths_preProcess)),
indep_vars = indep_vars_vctr, rsp_var = glb_rsp_var,
fit_df = trnobs_df, OOB_df = NULL)
if ((length(method_vctr) == 1) || (method != "glm")) {
glb_fin_mdl <- glb_models_lst[[length(glb_models_lst)]]
glb_fin_mdl_id <- glb_models_df[length(glb_models_lst), "id"]
}
}
}
## Warning in if (grepl("RFE\\.X", names(glbMdlFamilies))) {: the condition
## has length > 1 and only the first element will be used
## [1] "myfit_mdl: enter: 0.000000 secs"
## [1] "fitting model: Final##rcv#glmnet"
## [1] " indep_vars: P.cor.cut.fctr,P.cor,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.mnkSml.2.scld,P.mnkSml.1.scld,P.cosSml,P.mnkSml.3.scld.cut.fctr,P.mnkSml.2.scld.cut.fctr,P.mnkSml.1.scld.cut.fctr,.rnorm"
## [1] "myfit_mdl: setup complete: 0.706000 secs"
## + Fold1.Rep1: alpha=0.100, lambda=0.04419
## - Fold1.Rep1: alpha=0.100, lambda=0.04419
## + Fold1.Rep1: alpha=0.325, lambda=0.04419
## - Fold1.Rep1: alpha=0.325, lambda=0.04419
## + Fold1.Rep1: alpha=0.550, lambda=0.04419
## - Fold1.Rep1: alpha=0.550, lambda=0.04419
## + Fold1.Rep1: alpha=0.775, lambda=0.04419
## - Fold1.Rep1: alpha=0.775, lambda=0.04419
## + Fold1.Rep1: alpha=1.000, lambda=0.04419
## - Fold1.Rep1: alpha=1.000, lambda=0.04419
## + Fold2.Rep1: alpha=0.100, lambda=0.04419
## - Fold2.Rep1: alpha=0.100, lambda=0.04419
## + Fold2.Rep1: alpha=0.325, lambda=0.04419
## - Fold2.Rep1: alpha=0.325, lambda=0.04419
## + Fold2.Rep1: alpha=0.550, lambda=0.04419
## - Fold2.Rep1: alpha=0.550, lambda=0.04419
## + Fold2.Rep1: alpha=0.775, lambda=0.04419
## - Fold2.Rep1: alpha=0.775, lambda=0.04419
## + Fold2.Rep1: alpha=1.000, lambda=0.04419
## - Fold2.Rep1: alpha=1.000, lambda=0.04419
## + Fold3.Rep1: alpha=0.100, lambda=0.04419
## - Fold3.Rep1: alpha=0.100, lambda=0.04419
## + Fold3.Rep1: alpha=0.325, lambda=0.04419
## - Fold3.Rep1: alpha=0.325, lambda=0.04419
## + Fold3.Rep1: alpha=0.550, lambda=0.04419
## - Fold3.Rep1: alpha=0.550, lambda=0.04419
## + Fold3.Rep1: alpha=0.775, lambda=0.04419
## - Fold3.Rep1: alpha=0.775, lambda=0.04419
## + Fold3.Rep1: alpha=1.000, lambda=0.04419
## - Fold3.Rep1: alpha=1.000, lambda=0.04419
## + Fold1.Rep2: alpha=0.100, lambda=0.04419
## - Fold1.Rep2: alpha=0.100, lambda=0.04419
## + Fold1.Rep2: alpha=0.325, lambda=0.04419
## - Fold1.Rep2: alpha=0.325, lambda=0.04419
## + Fold1.Rep2: alpha=0.550, lambda=0.04419
## - Fold1.Rep2: alpha=0.550, lambda=0.04419
## + Fold1.Rep2: alpha=0.775, lambda=0.04419
## - Fold1.Rep2: alpha=0.775, lambda=0.04419
## + Fold1.Rep2: alpha=1.000, lambda=0.04419
## - Fold1.Rep2: alpha=1.000, lambda=0.04419
## + Fold2.Rep2: alpha=0.100, lambda=0.04419
## - Fold2.Rep2: alpha=0.100, lambda=0.04419
## + Fold2.Rep2: alpha=0.325, lambda=0.04419
## - Fold2.Rep2: alpha=0.325, lambda=0.04419
## + Fold2.Rep2: alpha=0.550, lambda=0.04419
## - Fold2.Rep2: alpha=0.550, lambda=0.04419
## + Fold2.Rep2: alpha=0.775, lambda=0.04419
## - Fold2.Rep2: alpha=0.775, lambda=0.04419
## + Fold2.Rep2: alpha=1.000, lambda=0.04419
## - Fold2.Rep2: alpha=1.000, lambda=0.04419
## + Fold3.Rep2: alpha=0.100, lambda=0.04419
## - Fold3.Rep2: alpha=0.100, lambda=0.04419
## + Fold3.Rep2: alpha=0.325, lambda=0.04419
## - Fold3.Rep2: alpha=0.325, lambda=0.04419
## + Fold3.Rep2: alpha=0.550, lambda=0.04419
## - Fold3.Rep2: alpha=0.550, lambda=0.04419
## + Fold3.Rep2: alpha=0.775, lambda=0.04419
## - Fold3.Rep2: alpha=0.775, lambda=0.04419
## + Fold3.Rep2: alpha=1.000, lambda=0.04419
## - Fold3.Rep2: alpha=1.000, lambda=0.04419
## + Fold1.Rep3: alpha=0.100, lambda=0.04419
## - Fold1.Rep3: alpha=0.100, lambda=0.04419
## + Fold1.Rep3: alpha=0.325, lambda=0.04419
## - Fold1.Rep3: alpha=0.325, lambda=0.04419
## + Fold1.Rep3: alpha=0.550, lambda=0.04419
## - Fold1.Rep3: alpha=0.550, lambda=0.04419
## + Fold1.Rep3: alpha=0.775, lambda=0.04419
## - Fold1.Rep3: alpha=0.775, lambda=0.04419
## + Fold1.Rep3: alpha=1.000, lambda=0.04419
## - Fold1.Rep3: alpha=1.000, lambda=0.04419
## + Fold2.Rep3: alpha=0.100, lambda=0.04419
## - Fold2.Rep3: alpha=0.100, lambda=0.04419
## + Fold2.Rep3: alpha=0.325, lambda=0.04419
## - Fold2.Rep3: alpha=0.325, lambda=0.04419
## + Fold2.Rep3: alpha=0.550, lambda=0.04419
## - Fold2.Rep3: alpha=0.550, lambda=0.04419
## + Fold2.Rep3: alpha=0.775, lambda=0.04419
## - Fold2.Rep3: alpha=0.775, lambda=0.04419
## + Fold2.Rep3: alpha=1.000, lambda=0.04419
## - Fold2.Rep3: alpha=1.000, lambda=0.04419
## + Fold3.Rep3: alpha=0.100, lambda=0.04419
## - Fold3.Rep3: alpha=0.100, lambda=0.04419
## + Fold3.Rep3: alpha=0.325, lambda=0.04419
## - Fold3.Rep3: alpha=0.325, lambda=0.04419
## + Fold3.Rep3: alpha=0.550, lambda=0.04419
## - Fold3.Rep3: alpha=0.550, lambda=0.04419
## + Fold3.Rep3: alpha=0.775, lambda=0.04419
## - Fold3.Rep3: alpha=0.775, lambda=0.04419
## + Fold3.Rep3: alpha=1.000, lambda=0.04419
## - Fold3.Rep3: alpha=1.000, lambda=0.04419
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 0.55, lambda = 9.52e-05 on full training set
## [1] "myfit_mdl: train complete: 75.090000 secs"
## Warning in myfit_mdl(mdl_specs_lst = myinit_mdl_specs_lst(mdl_specs_lst
## = list(id.prefix = mdl_id_pfx, : model's bestTune found at an extreme of
## tuneGrid for parameter: lambda
## Length Class Mode
## a0 63 -none- numeric
## beta 1323 dgCMatrix S4
## df 63 -none- numeric
## dim 2 -none- numeric
## lambda 63 -none- numeric
## dev.ratio 63 -none- numeric
## nulldev 1 -none- numeric
## npasses 1 -none- numeric
## jerr 1 -none- numeric
## offset 1 -none- logical
## classnames 2 -none- character
## call 5 -none- call
## nobs 1 -none- numeric
## lambdaOpt 1 -none- numeric
## xNames 21 -none- character
## problemType 1 -none- character
## tuneValue 2 data.frame list
## obsLevels 2 -none- character
## [1] "min lambda > lambdaOpt:"
## (Intercept) .rnorm
## -0.279260687 -0.014308688
## P.cor P.cor.cut.fctr(0,0.5]
## 2.636063538 -0.698303734
## P.cor.cut.fctr(0.5,0.7] P.cosSml
## -0.407112105 -2.340733886
## P.cosSml.cut.fctr(0.95,0.97] P.cosSml.cut.fctr(0.97,0.98]
## 0.043384468 0.055154426
## P.cosSml.cut.fctr(0.98,1] P.mnkSml.1.scld.cut.fctr(0.47,0.64]
## 0.282915362 -0.055192746
## P.mnkSml.1.scld.cut.fctr(0.64,0.87] P.mnkSml.1.scld.cut.fctr(0.87,2.9]
## 0.005136963 0.100337684
## P.mnkSml.2.scld.cut.fctr(1.48,4.6] P.mnkSml.3.scld
## -0.057198615 0.275610838
## P.mnkSml.3.scld.cut.fctr(0.21,0.27] P.mnkSml.3.scld.cut.fctr(0.27,0.35]
## 0.028391736 0.036032355
## [1] "max lambda < lambdaOpt:"
## [1] "Feats mismatch between coefs_left & rght:"
## [1] "(Intercept)"
## [2] ".rnorm"
## [3] "P.cor"
## [4] "P.cor.cut.fctr(0,0.5]"
## [5] "P.cor.cut.fctr(0.5,0.7]"
## [6] "P.cor.cut.fctr(0.7,1]"
## [7] "P.cosSml"
## [8] "P.cosSml.cut.fctr(0.95,0.97]"
## [9] "P.cosSml.cut.fctr(0.97,0.98]"
## [10] "P.cosSml.cut.fctr(0.98,1]"
## [11] "P.mnkSml.1.scld"
## [12] "P.mnkSml.1.scld.cut.fctr(0.47,0.64]"
## [13] "P.mnkSml.1.scld.cut.fctr(0.64,0.87]"
## [14] "P.mnkSml.1.scld.cut.fctr(0.87,2.9]"
## [15] "P.mnkSml.2.scld"
## [16] "P.mnkSml.2.scld.cut.fctr(0.86,1.14]"
## [17] "P.mnkSml.2.scld.cut.fctr(1.14,1.48]"
## [18] "P.mnkSml.2.scld.cut.fctr(1.48,4.6]"
## [19] "P.mnkSml.3.scld"
## [20] "P.mnkSml.3.scld.cut.fctr(0.21,0.27]"
## [21] "P.mnkSml.3.scld.cut.fctr(0.27,0.35]"
## [22] "P.mnkSml.3.scld.cut.fctr(0.35,1.1]"
## [1] "myfit_mdl: train diagnostics complete: 75.642000 secs"
## Prediction
## Reference .none left_eye_center
## .none 17202 11226
## left_eye_center 2141 4891
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.6230400 0.2022912 0.6179719 0.6280880 0.8016920
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## [1] "myfit_mdl: predict complete: 95.253000 secs"
## id
## 1 Final##rcv#glmnet
## feats
## 1 P.cor.cut.fctr,P.cor,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.mnkSml.2.scld,P.mnkSml.1.scld,P.cosSml,P.mnkSml.3.scld.cut.fctr,P.mnkSml.2.scld.cut.fctr,P.mnkSml.1.scld.cut.fctr,.rnorm
## max.nTuningRuns min.elapsedtime.everything min.elapsedtime.final
## 1 25 74.302 1.203
## max.AUCpROC.fit max.Sens.fit max.Spec.fit max.AUCROCR.fit
## 1 0.5111543 0.9950049 0.02730375 0.7011493
## opt.prob.threshold.fit max.f.score.fit max.Accuracy.fit
## 1 0.2 0.4225668 0.8034593
## max.AccuracyLower.fit max.AccuracyUpper.fit max.Kappa.fit
## 1 0.6179719 0.628088 0.0406098
## max.AccuracySD.fit max.KappaSD.fit
## 1 0.0009585158 0.005911364
## [1] "myfit_mdl: exit: 95.269000 secs"
## [1] "myfit_mdl: enter: 0.000000 secs"
## [1] "fitting model: Final##rcv#glm"
## [1] " indep_vars: P.cor.cut.fctr,P.cor,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.mnkSml.2.scld,P.mnkSml.1.scld,P.cosSml,P.mnkSml.3.scld.cut.fctr,P.mnkSml.2.scld.cut.fctr,P.mnkSml.1.scld.cut.fctr,.rnorm"
## [1] "myfit_mdl: setup complete: 0.693000 secs"
## + Fold1.Rep1: parameter=none
## - Fold1.Rep1: parameter=none
## + Fold2.Rep1: parameter=none
## - Fold2.Rep1: parameter=none
## + Fold3.Rep1: parameter=none
## - Fold3.Rep1: parameter=none
## + Fold1.Rep2: parameter=none
## - Fold1.Rep2: parameter=none
## + Fold2.Rep2: parameter=none
## - Fold2.Rep2: parameter=none
## + Fold3.Rep2: parameter=none
## - Fold3.Rep2: parameter=none
## + Fold1.Rep3: parameter=none
## - Fold1.Rep3: parameter=none
## + Fold2.Rep3: parameter=none
## - Fold2.Rep3: parameter=none
## + Fold3.Rep3: parameter=none
## - Fold3.Rep3: parameter=none
## Aggregating results
## Fitting final model on full training set
## [1] "myfit_mdl: train complete: 7.262000 secs"
##
## Call:
## NULL
##
## Deviance Residuals:
## Min 1Q Median 3Q Max
## -1.3563 -0.7077 -0.5074 -0.3558 2.8269
##
## Coefficients:
## Estimate Std. Error z value
## (Intercept) 0.52948 0.55072 0.961
## .rnorm -0.01619 0.01406 -1.152
## P.cor 3.00997 0.19129 15.735
## `P.cor.cut.fctr(0,0.5]` -1.09264 0.15815 -6.909
## `P.cor.cut.fctr(0.5,0.7]` -0.88319 0.18530 -4.766
## `P.cor.cut.fctr(0.7,1]` -0.52058 0.20820 -2.500
## P.cosSml -2.99433 0.60548 -4.945
## `P.cosSml.cut.fctr(0.95,0.97]` 0.09472 0.05390 1.757
## `P.cosSml.cut.fctr(0.97,0.98]` 0.11486 0.06104 1.882
## `P.cosSml.cut.fctr(0.98,1]` 0.33147 0.06400 5.179
## P.mnkSml.1.scld -4.73104 1.09180 -4.333
## `P.mnkSml.1.scld.cut.fctr(0.47,0.64]` 0.02634 0.09297 0.283
## `P.mnkSml.1.scld.cut.fctr(0.64,0.87]` 0.22229 0.12302 1.807
## `P.mnkSml.1.scld.cut.fctr(0.87,2.9]` 0.42488 0.15580 2.727
## P.mnkSml.2.scld 6.28552 1.50328 4.181
## `P.mnkSml.2.scld.cut.fctr(0.86,1.14]` -0.12277 0.11401 -1.077
## `P.mnkSml.2.scld.cut.fctr(1.14,1.48]` -0.26273 0.14791 -1.776
## `P.mnkSml.2.scld.cut.fctr(1.48,4.6]` -0.42700 0.18484 -2.310
## P.mnkSml.3.scld -14.32468 3.72453 -3.846
## `P.mnkSml.3.scld.cut.fctr(0.21,0.27]` 0.07857 0.08563 0.918
## `P.mnkSml.3.scld.cut.fctr(0.27,0.35]` 0.08371 0.11328 0.739
## `P.mnkSml.3.scld.cut.fctr(0.35,1.1]` 0.03893 0.14726 0.264
## Pr(>|z|)
## (Intercept) 0.33633
## .rnorm 0.24937
## P.cor < 2e-16 ***
## `P.cor.cut.fctr(0,0.5]` 4.88e-12 ***
## `P.cor.cut.fctr(0.5,0.7]` 1.88e-06 ***
## `P.cor.cut.fctr(0.7,1]` 0.01241 *
## P.cosSml 7.60e-07 ***
## `P.cosSml.cut.fctr(0.95,0.97]` 0.07886 .
## `P.cosSml.cut.fctr(0.97,0.98]` 0.05987 .
## `P.cosSml.cut.fctr(0.98,1]` 2.23e-07 ***
## P.mnkSml.1.scld 1.47e-05 ***
## `P.mnkSml.1.scld.cut.fctr(0.47,0.64]` 0.77693
## `P.mnkSml.1.scld.cut.fctr(0.64,0.87]` 0.07077 .
## `P.mnkSml.1.scld.cut.fctr(0.87,2.9]` 0.00639 **
## P.mnkSml.2.scld 2.90e-05 ***
## `P.mnkSml.2.scld.cut.fctr(0.86,1.14]` 0.28153
## `P.mnkSml.2.scld.cut.fctr(1.14,1.48]` 0.07568 .
## `P.mnkSml.2.scld.cut.fctr(1.48,4.6]` 0.02088 *
## P.mnkSml.3.scld 0.00012 ***
## `P.mnkSml.3.scld.cut.fctr(0.21,0.27]` 0.35883
## `P.mnkSml.3.scld.cut.fctr(0.27,0.35]` 0.45993
## `P.mnkSml.3.scld.cut.fctr(0.35,1.1]` 0.79153
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## (Dispersion parameter for binomial family taken to be 1)
##
## Null deviance: 35322 on 35459 degrees of freedom
## Residual deviance: 32381 on 35438 degrees of freedom
## AIC: 32425
##
## Number of Fisher Scoring iterations: 5
##
## [1] "myfit_mdl: train diagnostics complete: 12.959000 secs"
## Prediction
## Reference .none left_eye_center
## .none 17576 10852
## left_eye_center 2212 4820
## Accuracy Kappa AccuracyLower AccuracyUpper AccuracyNull
## 0.6315849 0.2076775 0.6265395 0.6366087 0.8016920
## AccuracyPValue McnemarPValue
## 1.0000000 0.0000000
## [1] "myfit_mdl: predict complete: 33.800000 secs"
## id
## 1 Final##rcv#glm
## feats
## 1 P.cor.cut.fctr,P.cor,P.cosSml.cut.fctr,P.mnkSml.3.scld,P.mnkSml.2.scld,P.mnkSml.1.scld,P.cosSml,P.mnkSml.3.scld.cut.fctr,P.mnkSml.2.scld.cut.fctr,P.mnkSml.1.scld.cut.fctr,.rnorm
## max.nTuningRuns min.elapsedtime.everything min.elapsedtime.final
## 1 1 6.492 0.536
## max.AUCpROC.fit max.Sens.fit max.Spec.fit max.AUCROCR.fit
## 1 0.515412 0.9932813 0.03754266 0.7020281
## opt.prob.threshold.fit max.f.score.fit max.Accuracy.fit
## 1 0.2 0.4245948 0.8033841
## max.AccuracyLower.fit max.AccuracyUpper.fit max.Kappa.fit
## 1 0.6265395 0.6366087 0.04688878
## max.AccuracySD.fit max.KappaSD.fit
## 1 0.001130321 0.006556605
## [1] "myfit_mdl: exit: 33.815000 secs"
rm(ret_lst)
glb_chunks_df <- myadd_chunk(glb_chunks_df, "fit.data.training", major.inc=FALSE)
## label step_major step_minor label_minor bgn end
## 20 fit.data.training 9 0 0 442.049 572.536
## 21 fit.data.training 9 1 1 572.537 NA
## elapsed
## 20 130.487
## 21 NA
#stop(here"); glb2Sav()
if (glb_is_classification && glb_is_binomial)
prob_threshold <- glb_models_df[glb_models_df$id == glb_sel_mdl_id,
"opt.prob.threshold.OOB"] else
prob_threshold <- NULL
if (grepl("Ensemble", glb_fin_mdl_id)) {
# Get predictions for each model in ensemble; Outliers that have been moved to OOB might not have been predicted yet
mdlEnsembleComps <- unlist(str_split(subset(glb_models_df,
id == glb_fin_mdl_id)$feats, ","))
if (glb_is_classification && glb_is_binomial)
mdlEnsembleComps <- gsub("\\.prob$", "", mdlEnsembleComps)
mdlEnsembleComps <- gsub(paste0("^",
gsub(".", "\\.", mygetPredictIds(glb_rsp_var)$value, fixed = TRUE)),
"", mdlEnsembleComps)
for (mdl_id in mdlEnsembleComps) {
glbObsTrn <- glb_get_predictions(df = glbObsTrn, mdl_id = mdl_id,
rsp_var = glb_rsp_var,
prob_threshold_def = prob_threshold)
glbObsNew <- glb_get_predictions(df = glbObsNew, mdl_id = mdl_id,
rsp_var = glb_rsp_var,
prob_threshold_def = prob_threshold)
}
}
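# Recovering the component model ids from the ensemble's feats string works by
# stripping the prediction-id prefix and the .prob suffix. A minimal sketch
# with a hypothetical feats value:
toy_feats <- paste0("label.fctr.All.X..rcv.glmnet.prob,",
                    "label.fctr.Low.cor.X..rcv.glmnet.prob")
comps <- unlist(strsplit(toy_feats, ","))
comps <- gsub("\\.prob$", "", comps)          # drop the probability suffix
comps <- gsub("^label\\.fctr\\.", "", comps)  # drop the prediction-id prefix
print(comps)  # "All.X..rcv.glmnet" "Low.cor.X..rcv.glmnet"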
glbObsTrn <- glb_get_predictions(df = glbObsTrn, mdl_id = glb_fin_mdl_id,
rsp_var = glb_rsp_var,
prob_threshold_def = prob_threshold)
## Warning in glb_get_predictions(df = glbObsTrn, mdl_id = glb_fin_mdl_id, :
## Using default probability threshold: 0.2
glb_featsimp_df <- myget_feats_importance(mdl=glb_fin_mdl,
featsimp_df=glb_featsimp_df)
#glb_featsimp_df[, paste0(glb_fin_mdl_id, ".imp")] <- glb_featsimp_df$imp
print(glb_featsimp_df)
## All.X..rcv.glmnet.imp
## P.cor 72.58434846
## P.cosSml 100.00000000
## `P.cor.cut.fctr(0,0.5]` NA
## `P.cosSml.cut.fctr(0.98,1]` NA
## P.mnkSml.1.scld 6.89598837
## P.mnkSml.2.scld 2.56942459
## `P.cor.cut.fctr(0.5,0.7]` NA
## P.mnkSml.3.scld 14.79411470
## `P.mnkSml.1.scld.cut.fctr(0.87,2.9]` NA
## `P.mnkSml.2.scld.cut.fctr(1.48,4.6]` NA
## `P.cosSml.cut.fctr(0.97,0.98]` NA
## `P.mnkSml.1.scld.cut.fctr(0.64,0.87]` NA
## `P.mnkSml.2.scld.cut.fctr(1.14,1.48]` NA
## `P.mnkSml.1.scld.cut.fctr(0.47,0.64]` NA
## `P.cor.cut.fctr(0.7,1]` NA
## `P.mnkSml.2.scld.cut.fctr(0.86,1.14]` NA
## .rnorm 0.07196682
## `P.cosSml.cut.fctr(0.95,0.97]` NA
## `P.mnkSml.3.scld.cut.fctr(0.21,0.27]` NA
## `P.mnkSml.3.scld.cut.fctr(0.27,0.35]` NA
## `P.mnkSml.3.scld.cut.fctr(0.35,1.1]` NA
## P.cor.cut.fctr(0,0.5] 22.71645732
## P.cor.cut.fctr(0.5,0.7] 14.54534598
## P.cor.cut.fctr(0.7,1] 3.01855082
## P.cosSml.cut.fctr(0.95,0.97] 0.83425263
## P.cosSml.cut.fctr(0.97,0.98] 3.92472751
## P.cosSml.cut.fctr(0.98,1] 9.90390371
## P.mnkSml.1.scld.cut.fctr(0.47,0.64] 2.50301837
## P.mnkSml.1.scld.cut.fctr(0.64,0.87] 4.93083499
## P.mnkSml.1.scld.cut.fctr(0.87,2.9] 10.87110697
## P.mnkSml.2.scld.cut.fctr(0.86,1.14] 2.10494834
## P.mnkSml.2.scld.cut.fctr(1.14,1.48] 4.55595258
## P.mnkSml.2.scld.cut.fctr(1.48,4.6] 9.83782520
## P.mnkSml.3.scld.cut.fctr(0.21,0.27] 0.33231166
## P.mnkSml.3.scld.cut.fctr(0.27,0.35] 0.67077294
## P.mnkSml.3.scld.cut.fctr(0.35,1.1] 0.00000000
## Final..rcv.glm.imp imp
## P.cor 100.000000 100.000000
## P.cosSml 44.024897 44.024897
## `P.cor.cut.fctr(0,0.5]` 43.925511 43.925511
## `P.cosSml.cut.fctr(0.98,1]` 39.732778 39.732778
## P.mnkSml.1.scld 31.259727 31.259727
## P.mnkSml.2.scld 30.035613 30.035613
## `P.cor.cut.fctr(0.5,0.7]` 28.220573 28.220573
## P.mnkSml.3.scld 27.490994 27.490994
## `P.mnkSml.1.scld.cut.fctr(0.87,2.9]` 25.568643 25.568643
## `P.mnkSml.2.scld.cut.fctr(1.48,4.6]` 20.957632 20.957632
## `P.cosSml.cut.fctr(0.97,0.98]` 18.360347 18.360347
## `P.mnkSml.1.scld.cut.fctr(0.64,0.87]` 18.174044 18.174044
## `P.mnkSml.2.scld.cut.fctr(1.14,1.48]` 15.443958 15.443958
## `P.mnkSml.1.scld.cut.fctr(0.47,0.64]` 13.755848 13.755848
## `P.cor.cut.fctr(0.7,1]` 12.190916 12.190916
## `P.mnkSml.2.scld.cut.fctr(0.86,1.14]` 11.534121 11.534121
## .rnorm 10.257827 10.257827
## `P.cosSml.cut.fctr(0.95,0.97]` 7.222091 7.222091
## `P.mnkSml.3.scld.cut.fctr(0.21,0.27]` 3.991109 3.991109
## `P.mnkSml.3.scld.cut.fctr(0.27,0.35]` 3.508511 3.508511
## `P.mnkSml.3.scld.cut.fctr(0.35,1.1]` 0.000000 0.000000
## P.cor.cut.fctr(0,0.5] NA NA
## P.cor.cut.fctr(0.5,0.7] NA NA
## P.cor.cut.fctr(0.7,1] NA NA
## P.cosSml.cut.fctr(0.95,0.97] NA NA
## P.cosSml.cut.fctr(0.97,0.98] NA NA
## P.cosSml.cut.fctr(0.98,1] NA NA
## P.mnkSml.1.scld.cut.fctr(0.47,0.64] NA NA
## P.mnkSml.1.scld.cut.fctr(0.64,0.87] NA NA
## P.mnkSml.1.scld.cut.fctr(0.87,2.9] NA NA
## P.mnkSml.2.scld.cut.fctr(0.86,1.14] NA NA
## P.mnkSml.2.scld.cut.fctr(1.14,1.48] NA NA
## P.mnkSml.2.scld.cut.fctr(1.48,4.6] NA NA
## P.mnkSml.3.scld.cut.fctr(0.21,0.27] NA NA
## P.mnkSml.3.scld.cut.fctr(0.27,0.35] NA NA
## P.mnkSml.3.scld.cut.fctr(0.35,1.1] NA NA
if (glb_is_classification && glb_is_binomial)
glb_analytics_diag_plots(obs_df=glbObsTrn, mdl_id=glb_fin_mdl_id,
prob_threshold=glb_models_df[glb_models_df$id == glb_sel_mdl_id,
"opt.prob.threshold.OOB"]) else
glb_analytics_diag_plots(obs_df=glbObsTrn, mdl_id=glb_fin_mdl_id)
## Warning in glb_analytics_diag_plots(obs_df = glbObsTrn, mdl_id =
## glb_fin_mdl_id, : Limiting important feature scatter plots to 5 out of 6
## [1] "Min/Max Boundaries: "
## ImageId.x.y label.fctr label.fctr.All.X..rcv.glmnet.prob
## 1 Train#1456#74#26 .none 0.01869817
## 2 Train#3231#67#38 left_eye_center NA
## 3 Train#4303#67#34 .none NA
## 4 Train#6676#62#38 .none 0.58751855
## label.fctr.All.X..rcv.glmnet label.fctr.All.X..rcv.glmnet.err
## 1 .none FALSE
## 2 <NA> NA
## 3 <NA> NA
## 4 left_eye_center TRUE
## label.fctr.All.X..rcv.glmnet.err.abs label.fctr.All.X..rcv.glmnet.is.acc
## 1 0.01869817 TRUE
## 2 NA NA
## 3 NA NA
## 4 0.58751855 FALSE
## label.fctr.Final..rcv.glmnet.prob label.fctr.Final..rcv.glmnet
## 1 0.01611337 .none
## 2 0.52553180 left_eye_center
## 3 0.22248420 left_eye_center
## 4 0.55799483 left_eye_center
## label.fctr.Final..rcv.glmnet.err label.fctr.Final..rcv.glmnet.err.abs
## 1 FALSE 0.01611337
## 2 FALSE 0.47446820
## 3 TRUE 0.22248420
## 4 TRUE 0.55799483
## label.fctr.Final..rcv.glmnet.is.acc
## 1 TRUE
## 2 TRUE
## 3 FALSE
## 4 FALSE
## label.fctr.Final..rcv.glmnet.accurate label.fctr.Final..rcv.glmnet.error
## 1 TRUE 0.0000000
## 2 TRUE 0.0000000
## 3 FALSE 0.0224842
## 4 FALSE 0.3579948
## .label
## 1 Train#1456#74#26
## 2 Train#3231#67#38
## 3 Train#4303#67#34
## 4 Train#6676#62#38
## [1] "Inaccurate: "
## ImageId.x.y label.fctr label.fctr.All.X..rcv.glmnet.prob
## 1 Train#1456#72#28 left_eye_center NA
## 2 Train#1549#78#47 left_eye_center 0.02366280
## 3 Train#2765#63#31 left_eye_center 0.03830011
## 4 Train#3254#66#31 left_eye_center 0.04277292
## 5 Train#2651#71#35 left_eye_center NA
## 6 Train#4049#68#37 left_eye_center NA
## label.fctr.All.X..rcv.glmnet label.fctr.All.X..rcv.glmnet.err
## 1 <NA> NA
## 2 .none TRUE
## 3 .none TRUE
## 4 .none TRUE
## 5 <NA> NA
## 6 <NA> NA
## label.fctr.All.X..rcv.glmnet.err.abs label.fctr.All.X..rcv.glmnet.is.acc
## 1 NA NA
## 2 0.9763372 FALSE
## 3 0.9616999 FALSE
## 4 0.9572271 FALSE
## 5 NA NA
## 6 NA NA
## label.fctr.Final..rcv.glmnet.prob label.fctr.Final..rcv.glmnet
## 1 0.01882391 .none
## 2 0.02157513 .none
## 3 0.03643718 .none
## 4 0.04026602 .none
## 5 0.04092532 .none
## 6 0.04146003 .none
## label.fctr.Final..rcv.glmnet.err label.fctr.Final..rcv.glmnet.err.abs
## 1 TRUE 0.9811761
## 2 TRUE 0.9784249
## 3 TRUE 0.9635628
## 4 TRUE 0.9597340
## 5 TRUE 0.9590747
## 6 TRUE 0.9585400
## label.fctr.Final..rcv.glmnet.is.acc
## 1 FALSE
## 2 FALSE
## 3 FALSE
## 4 FALSE
## 5 FALSE
## 6 FALSE
## label.fctr.Final..rcv.glmnet.accurate label.fctr.Final..rcv.glmnet.error
## 1 FALSE -0.1811761
## 2 FALSE -0.1784249
## 3 FALSE -0.1635628
## 4 FALSE -0.1597340
## 5 FALSE -0.1590747
## 6 FALSE -0.1585400
## ImageId.x.y label.fctr label.fctr.All.X..rcv.glmnet.prob
## 1405 Train#5247#67#38 left_eye_center NA
## 2274 Train#5612#65#40 .none NA
## 6791 Train#6593#62#39 .none NA
## 8527 Train#5174#69#41 .none NA
## 12139 Train#6286#69#38 .none 0.3948704
## 13112 Train#0938#67#41 .none NA
## label.fctr.All.X..rcv.glmnet label.fctr.All.X..rcv.glmnet.err
## 1405 <NA> NA
## 2274 <NA> NA
## 6791 <NA> NA
## 8527 <NA> NA
## 12139 left_eye_center TRUE
## 13112 <NA> NA
## label.fctr.All.X..rcv.glmnet.err.abs
## 1405 NA
## 2274 NA
## 6791 NA
## 8527 NA
## 12139 0.3948704
## 13112 NA
## label.fctr.All.X..rcv.glmnet.is.acc
## 1405 NA
## 2274 NA
## 6791 NA
## 8527 NA
## 12139 FALSE
## 13112 NA
## label.fctr.Final..rcv.glmnet.prob label.fctr.Final..rcv.glmnet
## 1405 0.1378575 .none
## 2274 0.2013868 left_eye_center
## 6791 0.2480874 left_eye_center
## 8527 0.2690787 left_eye_center
## 12139 0.4250775 left_eye_center
## 13112 0.4842721 left_eye_center
## label.fctr.Final..rcv.glmnet.err
## 1405 TRUE
## 2274 TRUE
## 6791 TRUE
## 8527 TRUE
## 12139 TRUE
## 13112 TRUE
## label.fctr.Final..rcv.glmnet.err.abs
## 1405 0.8621425
## 2274 0.2013868
## 6791 0.2480874
## 8527 0.2690787
## 12139 0.4250775
## 13112 0.4842721
## label.fctr.Final..rcv.glmnet.is.acc
## 1405 FALSE
## 2274 FALSE
## 6791 FALSE
## 8527 FALSE
## 12139 FALSE
## 13112 FALSE
## label.fctr.Final..rcv.glmnet.accurate
## 1405 FALSE
## 2274 FALSE
## 6791 FALSE
## 8527 FALSE
## 12139 FALSE
## 13112 FALSE
## label.fctr.Final..rcv.glmnet.error
## 1405 -0.062142455
## 2274 0.001386802
## 6791 0.048087407
## 8527 0.069078689
## 12139 0.225077490
## 13112 0.284272137
## ImageId.x.y label.fctr label.fctr.All.X..rcv.glmnet.prob
## 13362 Train#4974#60#38 .none 0.5545942
## 13363 Train#3574#59#35 .none 0.5591061
## 13364 Train#2833#64#40 .none NA
## 13365 Train#6676#62#38 .none 0.5875185
## 13366 Train#5757#65#39 .none NA
## 13367 Train#0665#62#43 .none 0.5569617
## label.fctr.All.X..rcv.glmnet label.fctr.All.X..rcv.glmnet.err
## 13362 left_eye_center TRUE
## 13363 left_eye_center TRUE
## 13364 <NA> NA
## 13365 left_eye_center TRUE
## 13366 <NA> NA
## 13367 left_eye_center TRUE
## label.fctr.All.X..rcv.glmnet.err.abs
## 13362 0.5545942
## 13363 0.5591061
## 13364 NA
## 13365 0.5875185
## 13366 NA
## 13367 0.5569617
## label.fctr.All.X..rcv.glmnet.is.acc
## 13362 FALSE
## 13363 FALSE
## 13364 NA
## 13365 FALSE
## 13366 NA
## 13367 FALSE
## label.fctr.Final..rcv.glmnet.prob label.fctr.Final..rcv.glmnet
## 13362 0.5509812 left_eye_center
## 13363 0.5531368 left_eye_center
## 13364 0.5538345 left_eye_center
## 13365 0.5579948 left_eye_center
## 13366 0.5609848 left_eye_center
## 13367 0.5624876 left_eye_center
## label.fctr.Final..rcv.glmnet.err
## 13362 TRUE
## 13363 TRUE
## 13364 TRUE
## 13365 TRUE
## 13366 TRUE
## 13367 TRUE
## label.fctr.Final..rcv.glmnet.err.abs
## 13362 0.5509812
## 13363 0.5531368
## 13364 0.5538345
## 13365 0.5579948
## 13366 0.5609848
## 13367 0.5624876
## label.fctr.Final..rcv.glmnet.is.acc
## 13362 FALSE
## 13363 FALSE
## 13364 FALSE
## 13365 FALSE
## 13366 FALSE
## 13367 FALSE
## label.fctr.Final..rcv.glmnet.accurate
## 13362 FALSE
## 13363 FALSE
## 13364 FALSE
## 13365 FALSE
## 13366 FALSE
## 13367 FALSE
## label.fctr.Final..rcv.glmnet.error
## 13362 0.3509812
## 13363 0.3531368
## 13364 0.3538345
## 13365 0.3579948
## 13366 0.3609848
## 13367 0.3624876
dsp_feats_vctr <- character(0)
for(var in grep(".imp", names(glb_feats_df), fixed=TRUE, value=TRUE))
dsp_feats_vctr <- union(dsp_feats_vctr,
glb_feats_df[!is.na(glb_feats_df[, var]), "id"])
# print(glbObsTrn[glbObsTrn$UniqueID %in% FN_OOB_ids,
# grep(glb_rsp_var, names(glbObsTrn), value=TRUE)])
print(setdiff(names(glbObsTrn), names(glbObsAll)))
## [1] "label.fctr.Final..rcv.glmnet.prob"
## [2] "label.fctr.Final..rcv.glmnet"
## [3] "label.fctr.Final..rcv.glmnet.err"
## [4] "label.fctr.Final..rcv.glmnet.err.abs"
## [5] "label.fctr.Final..rcv.glmnet.is.acc"
for (col in setdiff(names(glbObsTrn), names(glbObsAll)))
# Merge or cbind ?
glbObsAll[glbObsAll$.src == "Train", col] <- glbObsTrn[, col]
print(setdiff(names(glbObsFit), names(glbObsAll)))
## character(0)
print(setdiff(names(glbObsOOB), names(glbObsAll)))
## character(0)
for (col in setdiff(names(glbObsOOB), names(glbObsAll)))
# Merge or cbind ?
glbObsAll[glbObsAll$.lcn == "OOB", col] <- glbObsOOB[, col]
print(setdiff(names(glbObsNew), names(glbObsAll)))
## character(0)
#glb2Sav(); all.equal(savObsAll, glbObsAll); all.equal(sav_models_lst, glb_models_lst)
#load(file = paste0(glbOut$pfx, "dsk_knitr.RData"))
#cmpCols <- names(glbObsAll)[!grepl("\\.Final\\.", names(glbObsAll))]; all.equal(savObsAll[, cmpCols], glbObsAll[, cmpCols]); all.equal(savObsAll[, "H.P.http"], glbObsAll[, "H.P.http"]);
replay.petrisim(pn = glb_analytics_pn,
replay.trans = (glb_analytics_avl_objs <- c(glb_analytics_avl_objs,
"data.training.all.prediction","model.final")), flip_coord = TRUE)
## time trans "bgn " "fit.data.training.all " "predict.data.new " "end "
## 0.0000 multiple enabled transitions: data.training.all data.new model.selected firing: data.training.all
## 1.0000 1 2 1 0 0
## 1.0000 multiple enabled transitions: data.training.all data.new model.selected model.final data.training.all.prediction firing: data.new
## 2.0000 2 1 1 1 0
## 2.0000 multiple enabled transitions: data.training.all data.new model.selected model.final data.training.all.prediction data.new.prediction firing: model.selected
## 3.0000 3 0 2 1 0
## 3.0000 multiple enabled transitions: model.final data.training.all.prediction data.new.prediction firing: data.training.all.prediction
## 4.0000 5 0 1 1 1
## 4.0000 multiple enabled transitions: model.final data.training.all.prediction data.new.prediction firing: model.final
## 5.0000 4 0 0 2 1
glb_chunks_df <- myadd_chunk(glb_chunks_df, "predict.data.new", major.inc = TRUE)
## label step_major step_minor label_minor bgn end
## 21 fit.data.training 9 1 1 572.537 609.849
## 22 predict.data.new 10 0 0 609.850 NA
## elapsed
## 21 37.313
## 22 NA
10.0: predict data new
## Warning in glb_get_predictions(obs_df, mdl_id = glb_fin_mdl_id, rsp_var =
## glb_rsp_var, : Using default probability threshold: 0.2
## Warning in glb_get_predictions(obs_df, mdl_id = glb_fin_mdl_id, rsp_var =
## glb_rsp_var, : Using default probability threshold: 0.2
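These warnings indicate that glb_get_predictions() fell back to a default probability threshold of 0.2, the same value reported as opt.prob.threshold.fit for the glmnet models further below. A minimal sketch of what such a cutoff does, using a hypothetical helper rather than the report's own function:
# Hypothetical helper: probabilities at or above the threshold map to the
# positive class, everything else to ".none"
predict_with_threshold <- function(prob, threshold = 0.2,
                                   lvls = c(".none", "left_eye_center")) {
    factor(ifelse(prob >= threshold, lvls[2], lvls[1]), levels = lvls)
}
predict_with_threshold(c(0.15, 0.55))  # -> .none left_eye_center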
## Warning in glb_analytics_diag_plots(obs_df = glbObsNew, mdl_id =
## glb_fin_mdl_id, : Limiting important feature scatter plots to 5 out of 6
## Warning: Removed 44575 rows containing missing values (geom_point).
## Warning: Removed 44575 rows containing missing values (geom_point).
## Warning: Removed 44575 rows containing missing values (geom_point).
## Warning: Removed 44575 rows containing missing values (geom_point).
## Warning: Removed 44575 rows containing missing values (geom_point).
## Warning: Removed 44575 rows containing missing values (geom_point).
## Warning: Removed 44575 rows containing missing values (geom_point).
## Warning: Removed 44575 rows containing missing values (geom_point).
## Warning: Removed 44575 rows containing missing values (geom_point).
## Warning: Removed 44575 rows containing missing values (geom_point).
## NULL
## Loading required package: stringr
## Loading required package: tidyr
##
## Attaching package: 'tidyr'
## The following object is masked from 'package:Matrix':
##
## expand
## [1] "OOBobs label.fctr.All.X..rcv.glmnet left_eye_center: min < min of Train range: 1"
## ImageId.x.y label.fctr.All.X..rcv.glmnet P.cosSml
## 21743 Train#4303#67#34 left_eye_center 0.3965111
## id cor.y exclude.as.feat cor.y.abs cor.high.X
## P.cosSml P.cosSml 0.07127943 FALSE 0.07127943 <NA>
## freqRatio percentUnique zeroVar nzv is.cor.y.abs.low
## P.cosSml 1 95.815 FALSE FALSE FALSE
## interaction.feat shapiro.test.p.value rsp_var_raw id_var rsp_var
## P.cosSml NA 2.021949e-73 FALSE NA NA
## max min max.label.fctr..none
## P.cosSml 0.996751 0.3965111 0.9960466
## max.label.fctr.left_eye_center min.label.fctr..none
## P.cosSml 0.9957697 0.4260766
## min.label.fctr.left_eye_center
## P.cosSml 0.4354579
## max.label.fctr.All.X..rcv.glmnet..none
## P.cosSml 0.9890015
## max.label.fctr.All.X..rcv.glmnet.left_eye_center
## P.cosSml 0.996751
## min.label.fctr.All.X..rcv.glmnet..none
## P.cosSml 0.5352177
## min.label.fctr.All.X..rcv.glmnet.left_eye_center
## P.cosSml 0.3965111
## max.label.fctr.Final..rcv.glmnet..none
## P.cosSml 0.9890158
## max.label.fctr.Final..rcv.glmnet.left_eye_center
## P.cosSml 0.9967506
## min.label.fctr.Final..rcv.glmnet..none
## P.cosSml 0.5879432
## min.label.fctr.Final..rcv.glmnet.left_eye_center
## P.cosSml 0.6705268
## [1] "OOBobs label.fctr.All.X..rcv.glmnet left_eye_center: max > max of Train range: 15"
## ImageId.x.y label.fctr.All.X..rcv.glmnet P.cor P.cosSml
## 10385 Train#2053#67#34 left_eye_center 0.8594082 0.9961942
## 23347 Train#4624#63#37 left_eye_center 0.8958575 0.9961543
## 16198 Train#3192#65#39 left_eye_center 0.9097553 0.9896535
## 16393 Train#3231#67#38 left_eye_center 0.8869588 0.9967510
## 16978 Train#3348#64#38 left_eye_center 0.8459996 0.9958615
## 17878 Train#3529#65#39 left_eye_center 0.9157342 0.9960587
## 19296 Train#3813#69#42 left_eye_center 0.8805364 0.9959000
## 25596 Train#5075#67#40 left_eye_center 0.9127174 0.9959875
## 26111 Train#5178#67#40 left_eye_center 0.9212727 0.9828559
## 28261 Train#5608#65#40 left_eye_center 0.9088795 0.9872596
## 29631 Train#5882#66#38 left_eye_center 0.9269978 0.9883422
## 30516 Train#6059#68#41 left_eye_center 0.9207783 0.9948102
## 32136 Train#6383#66#38 left_eye_center 0.8807489 0.9965954
## 34698 Train#6897#62#38 left_eye_center 0.9062735 0.9913203
## 34928 Train#6943#64#36 left_eye_center 0.9132380 0.9817742
## id cor.y exclude.as.feat cor.y.abs cor.high.X
## P.cor P.cor 0.25721381 FALSE 0.25721381 P.cor.cut.fctr
## P.cosSml P.cosSml 0.07127943 FALSE 0.07127943 <NA>
## freqRatio percentUnique zeroVar nzv is.cor.y.abs.low
## P.cor 1 98.16131 FALSE FALSE FALSE
## P.cosSml 1 95.81500 FALSE FALSE FALSE
## interaction.feat shapiro.test.p.value rsp_var_raw id_var rsp_var
## P.cor NA 5.758562e-28 FALSE NA NA
## P.cosSml NA 2.021949e-73 FALSE NA NA
## max min max.label.fctr..none
## P.cor 0.9417233 -0.7026343 0.9285033
## P.cosSml 0.9967510 0.3965111 0.9960466
## max.label.fctr.left_eye_center min.label.fctr..none
## P.cor 0.9050761 -0.7026343
## P.cosSml 0.9957697 0.4260766
## min.label.fctr.left_eye_center
## P.cor -0.5489289
## P.cosSml 0.4354579
## max.label.fctr.All.X..rcv.glmnet..none
## P.cor 0.6247983
## P.cosSml 0.9890015
## max.label.fctr.All.X..rcv.glmnet.left_eye_center
## P.cor 0.9269978
## P.cosSml 0.9967510
## min.label.fctr.All.X..rcv.glmnet..none
## P.cor -0.6360407
## P.cosSml 0.5352177
## min.label.fctr.All.X..rcv.glmnet.left_eye_center
## P.cor -0.1600265
## P.cosSml 0.3965111
## max.label.fctr.Final..rcv.glmnet..none
## P.cor 0.5734128
## P.cosSml 0.9890158
## max.label.fctr.Final..rcv.glmnet.left_eye_center
## P.cor 0.9417233
## P.cosSml 0.9967506
## min.label.fctr.Final..rcv.glmnet..none
## P.cor -0.6451595
## P.cosSml 0.5879432
## min.label.fctr.Final..rcv.glmnet.left_eye_center
## P.cor 0.4448105
## P.cosSml 0.6705268
## [1] "OOBobs total range outliers: 16"
## [1] "newobs label.fctr.Final..rcv.glmnet left_eye_center: max > max of Train range: 7"
## ImageId.x.y label.fctr.Final..rcv.glmnet P.cor
## 55501 Test#0802#63#38 left_eye_center 0.9398131
## 55502 Test#0802#64#38 left_eye_center 0.9357323
## 58239 Test#0912#66#35 left_eye_center 0.9406116
## 58240 Test#0912#67#35 left_eye_center 0.9417233
## 71584 Test#1445#66#39 left_eye_center 0.9239820
## 73589 Test#1526#66#35 left_eye_center 0.9406116
## 73590 Test#1526#67#35 left_eye_center 0.9417233
## P.mnkSml.3.scld
## 55501 0.5648172
## 55502 0.5633811
## 58239 0.3521428
## 58240 0.3528755
## 71584 0.9954918
## 73589 0.3521428
## 73590 0.3528755
## id cor.y exclude.as.feat cor.y.abs
## P.cor P.cor 0.25721381 FALSE 0.25721381
## P.mnkSml.3.scld P.mnkSml.3.scld 0.09541747 FALSE 0.09541747
## cor.high.X freqRatio percentUnique zeroVar nzv
## P.cor P.cor.cut.fctr 1.000000 98.16131 FALSE FALSE
## P.mnkSml.3.scld <NA> 1.333333 97.87930 FALSE FALSE
## is.cor.y.abs.low interaction.feat shapiro.test.p.value
## P.cor FALSE NA 5.758562e-28
## P.mnkSml.3.scld FALSE NA 1.683507e-39
## rsp_var_raw id_var rsp_var max min
## P.cor FALSE NA NA 0.9417233 -0.7026343
## P.mnkSml.3.scld FALSE NA NA 0.9954918 0.1030109
## max.label.fctr..none max.label.fctr.left_eye_center
## P.cor 0.9285033 0.9269978
## P.mnkSml.3.scld 0.9908358 0.9874324
## min.label.fctr..none min.label.fctr.left_eye_center
## P.cor -0.7026343 -0.6360407
## P.mnkSml.3.scld 0.1030109 0.1051562
## max.label.fctr.All.X..rcv.glmnet..none
## P.cor 0.6247983
## P.mnkSml.3.scld 0.6434929
## max.label.fctr.All.X..rcv.glmnet.left_eye_center
## P.cor 0.9269978
## P.mnkSml.3.scld 0.9137446
## min.label.fctr.All.X..rcv.glmnet..none
## P.cor -0.6360407
## P.mnkSml.3.scld 0.1054423
## min.label.fctr.All.X..rcv.glmnet.left_eye_center
## P.cor -0.1600265
## P.mnkSml.3.scld 0.1055083
## max.label.fctr.Final..rcv.glmnet..none
## P.cor 0.5734128
## P.mnkSml.3.scld 0.6393835
## max.label.fctr.Final..rcv.glmnet.left_eye_center
## P.cor 0.9417233
## P.mnkSml.3.scld 0.9954918
## min.label.fctr.Final..rcv.glmnet..none
## P.cor -0.6451595
## P.mnkSml.3.scld 0.1047012
## min.label.fctr.Final..rcv.glmnet.left_eye_center
## P.cor 0.4448105
## P.mnkSml.3.scld 0.1068350
## [1] "newobs total range outliers: 7"
## [1] 0.2
## [1] "glb_sel_mdl_id: All.X##rcv#glmnet"
## [1] "glb_fin_mdl_id: Final##rcv#glmnet"
## [1] "Cross Validation issues:"
## MFO###myMFO_classfr Random###myrandom_classfr
## 0 0
## Max.cor.Y.rcv.1X1###glmnet Max.cor.Y##rcv#rpart
## 0 1
## max.Accuracy.OOB max.AUCROCR.OOB
## All.X##rcv#glmnet 0.6251128 0.7003196
## Interact.High.cor.Y##rcv#glmnet 0.5900835 0.6835589
## Low.cor.X##rcv#glmnet 0.5900835 0.6821994
## Max.cor.Y.rcv.1X1###glmnet 0.5900835 0.6809790
## Random###myrandom_classfr 0.1982739 0.5012493
## Max.cor.Y##rcv#rpart 0.1982739 0.5000000
## MFO###myMFO_classfr 0.1982739 0.5000000
## Final##rcv#glmnet NA NA
## Final##rcv#glm NA NA
## max.AUCpROC.OOB max.Accuracy.fit
## All.X##rcv#glmnet 0.5158859 0.8020528
## Interact.High.cor.Y##rcv#glmnet 0.5056058 0.8020339
## Low.cor.X##rcv#glmnet 0.5000000 0.8016580
## Max.cor.Y.rcv.1X1###glmnet 0.5000000 0.5900068
## Random###myrandom_classfr 0.5027819 0.1983420
## Max.cor.Y##rcv#rpart 0.5000000 0.8016580
## MFO###myMFO_classfr 0.5000000 0.1983420
## Final##rcv#glmnet NA 0.8034593
## Final##rcv#glm NA 0.8033841
## opt.prob.threshold.fit
## All.X##rcv#glmnet 0.2
## Interact.High.cor.Y##rcv#glmnet 0.2
## Low.cor.X##rcv#glmnet 0.2
## Max.cor.Y.rcv.1X1###glmnet 0.2
## Random###myrandom_classfr 0.1
## Max.cor.Y##rcv#rpart 0.1
## MFO###myMFO_classfr 0.1
## Final##rcv#glmnet 0.2
## Final##rcv#glm 0.2
## opt.prob.threshold.OOB
## All.X##rcv#glmnet 0.2
## Interact.High.cor.Y##rcv#glmnet 0.2
## Low.cor.X##rcv#glmnet 0.2
## Max.cor.Y.rcv.1X1###glmnet 0.2
## Random###myrandom_classfr 0.1
## Max.cor.Y##rcv#rpart 0.1
## MFO###myMFO_classfr 0.1
## Final##rcv#glmnet NA
## Final##rcv#glm NA
## [1] "All.X##rcv#glmnet OOB confusion matrix & accuracy: "
## Prediction
## Reference .none left_eye_center
## .none 8666 5547
## left_eye_center 1099 2416
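As a sanity check, the accuracy implied by this confusion matrix reproduces the max.Accuracy.OOB reported for All.X##rcv#glmnet above (layout assumed as printed: rows = Reference, columns = Prediction):
# Rebuild the printed OOB confusion matrix and recompute accuracy
oobCM <- matrix(c(8666, 5547,
                  1099, 2416), nrow = 2, byrow = TRUE,
                dimnames = list(Reference  = c(".none", "left_eye_center"),
                                Prediction = c(".none", "left_eye_center")))
sum(diag(oobCM)) / sum(oobCM)  # 0.6251128, matching max.Accuracy.OOB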
## err.abs.fit.sum err.abs.OOB.sum err.abs.trn.sum err.abs.new.sum
## (0.7,1] 1095.14943 1089.01902 2184.4731 NA
## (0.5,0.7] 2429.25533 2428.80508 4864.3357 NA
## (0,0.5] 1578.85672 1573.25059 3160.3072 NA
## (-1,0] 56.55873 57.34759 107.6234 NA
## .freqRatio.Fit .freqRatio.OOB .freqRatio.Tst .n.Fit .n.New..none
## (0.7,1] 0.12711482 0.12714350 0.1051935 2254 NA
## (0.5,0.7] 0.37711482 0.37714350 0.3167022 6687 1952
## (0,0.5] 0.47434018 0.47433439 0.5244868 8411 23375
## (-1,0] 0.02143018 0.02137861 0.0536175 380 2390
## .n.New.left_eye_center .n.OOB .n.Trn..none
## (0.7,1] 4689 2254 2560
## (0.5,0.7] 12165 6686 10133
## (0,0.5] 4 8409 15041
## (-1,0] NA 379 694
## .n.Trn.left_eye_center .n.Tst .n.fit .n.new .n.trn
## (0.7,1] 1948 4689 2254 4689 4508
## (0.5,0.7] 3240 14117 6687 14117 13373
## (0,0.5] 1779 23379 8411 23379 16820
## (-1,0] 65 2390 380 2390 759
## err.abs.OOB.mean err.abs.fit.mean err.abs.new.mean
## (0.7,1] 0.4831495 0.4858693 NA
## (0.5,0.7] 0.3632673 0.3632803 NA
## (0,0.5] 0.1870913 0.1877133 NA
## (-1,0] 0.1513129 0.1488388 NA
## err.abs.trn.mean
## (0.7,1] 0.4845770
## (0.5,0.7] 0.3637430
## (0,0.5] 0.1878898
## (-1,0] 0.1417963
## err.abs.fit.sum err.abs.OOB.sum err.abs.trn.sum
## 5159.820204 5148.422279 10316.739399
## err.abs.new.sum .freqRatio.Fit .freqRatio.OOB
## NA 1.000000 1.000000
## .freqRatio.Tst .n.Fit .n.New..none
## 1.000000 17732.000000 NA
## .n.New.left_eye_center .n.OOB .n.Trn..none
## NA 17728.000000 28428.000000
## .n.Trn.left_eye_center .n.Tst .n.fit
## 7032.000000 44575.000000 17732.000000
## .n.new .n.trn err.abs.OOB.mean
## 44575.000000 35460.000000 1.184821
## err.abs.fit.mean err.abs.new.mean err.abs.trn.mean
## 1.185702 NA 1.178006
## [1] "Features Importance for selected models:"
## All.X..rcv.glmnet.imp
## P.cosSml 100.00000000
## P.cor 72.58434846
## P.cor.cut.fctr(0,0.5] 22.71645732
## P.mnkSml.3.scld 14.79411470
## P.cor.cut.fctr(0.5,0.7] 14.54534598
## P.mnkSml.1.scld.cut.fctr(0.87,2.9] 10.87110697
## P.mnkSml.1.scld 6.89598837
## P.mnkSml.2.scld 2.56942459
## .rnorm 0.07196682
## `P.cor.cut.fctr(0,0.5]` NA
## `P.cosSml.cut.fctr(0.98,1]` NA
## `P.cor.cut.fctr(0.5,0.7]` NA
## `P.mnkSml.1.scld.cut.fctr(0.87,2.9]` NA
## `P.mnkSml.2.scld.cut.fctr(1.48,4.6]` NA
## `P.cosSml.cut.fctr(0.97,0.98]` NA
## `P.mnkSml.1.scld.cut.fctr(0.64,0.87]` NA
## `P.mnkSml.2.scld.cut.fctr(1.14,1.48]` NA
## `P.mnkSml.1.scld.cut.fctr(0.47,0.64]` NA
## `P.cor.cut.fctr(0.7,1]` NA
## `P.mnkSml.2.scld.cut.fctr(0.86,1.14]` NA
## Final..rcv.glm.imp
## P.cosSml 44.02490
## P.cor 100.00000
## P.cor.cut.fctr(0,0.5] NA
## P.mnkSml.3.scld 27.49099
## P.cor.cut.fctr(0.5,0.7] NA
## P.mnkSml.1.scld.cut.fctr(0.87,2.9] NA
## P.mnkSml.1.scld 31.25973
## P.mnkSml.2.scld 30.03561
## .rnorm 10.25783
## `P.cor.cut.fctr(0,0.5]` 43.92551
## `P.cosSml.cut.fctr(0.98,1]` 39.73278
## `P.cor.cut.fctr(0.5,0.7]` 28.22057
## `P.mnkSml.1.scld.cut.fctr(0.87,2.9]` 25.56864
## `P.mnkSml.2.scld.cut.fctr(1.48,4.6]` 20.95763
## `P.cosSml.cut.fctr(0.97,0.98]` 18.36035
## `P.mnkSml.1.scld.cut.fctr(0.64,0.87]` 18.17404
## `P.mnkSml.2.scld.cut.fctr(1.14,1.48]` 15.44396
## `P.mnkSml.1.scld.cut.fctr(0.47,0.64]` 13.75585
## `P.cor.cut.fctr(0.7,1]` 12.19092
## `P.mnkSml.2.scld.cut.fctr(0.86,1.14]` 11.53412
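The importance scores above are the 0-100 scaled values caret produces for each fitted model. A hedged sketch of how such a table is typically extracted; mdl is a placeholder for the caret train() object behind the selected model, not a variable defined in this report:
# varImp() rescales coefficient-based importances to [0, 100] for glmnet fits
require(caret)
impDF <- varImp(mdl)$importance
impDF[order(-impDF$Overall), , drop = FALSE]  # most important features first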
## [1] "glbObsNew prediction stats:"
##
## .none left_eye_center
## 27717 16858
## label step_major step_minor label_minor bgn end
## 22 predict.data.new 10 0 0 609.850 658.719
## 23 display.session.info 11 0 0 658.719 NA
## elapsed
## 22 48.869
## 23 NA
## label step_major step_minor label_minor bgn
## 17 fit.models 8 1 1 229.924
## 16 fit.models 8 0 0 79.093
## 20 fit.data.training 9 0 0 442.049
## 22 predict.data.new 10 0 0 609.850
## 21 fit.data.training 9 1 1 572.537
## 18 fit.models 8 2 2 397.878
## 1 import.data 1 0 0 16.238
## 2 inspect.data 2 0 0 41.445
## 19 fit.models 8 3 3 428.172
## 15 select.features 7 0 0 70.694
## 3 scrub.data 2 1 1 62.165
## 14 partition.data.training 6 0 0 67.868
## 11 extract.features.end 3 6 6 66.195
## 12 manage.missing.data 4 0 0 67.072
## 13 cluster.data 5 0 0 67.750
## 10 extract.features.string 3 5 5 66.133
## 9 extract.features.text 3 4 4 66.074
## 7 extract.features.image 3 2 2 65.987
## 4 transform.data 2 2 2 65.893
## 8 extract.features.price 3 3 3 66.039
## 6 extract.features.datetime 3 1 1 65.953
## 5 extract.features 3 0 0 65.933
## end elapsed duration
## 17 397.877 167.954 167.953
## 16 229.923 150.831 150.830
## 20 572.536 130.487 130.487
## 22 658.719 48.869 48.869
## 21 609.849 37.313 37.312
## 18 428.171 30.293 30.293
## 1 41.444 25.206 25.206
## 2 62.165 20.720 20.720
## 19 442.049 13.877 13.877
## 15 79.093 8.399 8.399
## 3 65.892 3.728 3.727
## 14 70.693 2.825 2.825
## 11 67.071 0.877 0.876
## 12 67.749 0.677 0.677
## 13 67.868 0.118 0.118
## 10 66.194 0.061 0.061
## 9 66.132 0.058 0.058
## 7 66.038 0.052 0.051
## 4 65.933 0.040 0.040
## 8 66.074 0.035 0.035
## 6 65.986 0.033 0.033
## 5 65.953 0.020 0.020
## [1] "Total Elapsed Time: 658.719 secs"
## label step_major step_minor label_minor bgn end
## 5 fit.models_1_Final 1 4 glmnet 300.446 367.125
## 3 fit.models_1_All.X 1 2 glmnet 235.394 300.439
## 6 fit.models_1_Final 1 5 glm 367.126 397.800
## 7 fit.models_1_preProc 1 6 preProc 397.801 397.869
## 1 fit.models_1_bgn 1 0 setup 235.378 235.388
## 2 fit.models_1_All.X 1 1 setup 235.388 235.394
## 4 fit.models_1_Final 1 3 setup 300.439 300.445
## elapsed duration
## 5 66.679 66.679
## 3 65.045 65.045
## 6 30.674 30.674
## 7 0.068 0.068
## 1 0.010 0.010
## 2 0.006 0.006
## 4 0.006 0.006
## [1] "Total Elapsed Time: 397.869 secs"